gitlab.com/gitlab-org/gitlab-foss.git
author     Mike Greiling <mike@pixelcog.com>  2017-10-23 10:32:14 +0300
committer  Mike Greiling <mike@pixelcog.com>  2017-10-23 10:32:14 +0300
commit     519ffa1ebf6eff8c7fee1e7aa919b44c1acfeb36 (patch)
tree       e2a96f675fbf8fed86568b64723b856ebd5012d9 /spec
parent     1e78c627d4c6630df3bbfd4e905018314d692b6d (diff)
parent     5d74973d8148548f505478ed773795d377e6f0d6 (diff)
Merge branch 'master' into sh-headless-chrome-support
* master: (297 commits)
  Fix deletion of container registry or images returning an error
  The fog-aliyun gem had a bug in v0.1.0 for file storage creation/update. This merge requests update the gem to v0.2.0 which contains the fix:
  Decrease ABC threshold to 54.28
  Update VERSION to 10.2.0-pre
  Update CHANGELOG.md for 10.1.0
  Document `CI_SHARED_ENVIRONMENT` and `CI_DISPOSABLE_ENVIRONMENT`
  Fix the external URLs generated for online view of HTML artifacts
  Use title as placeholder instead of issue title for reusability
  Fix failure in current_settings_spec.rb
  Clarify the difference between project_update and project_rename
  URI decode Page-Title header to preserve UTF-8 characters
  Update Gitaly version to v0.49.0
  Decrease Perceived Complexity threshold to 14
  Resolve "Remove help text regarding group issues on group issues page (and group merge requests page)"
  Force non diff resolved discussion to display when collapse toggled
  Added submodule support in multi-file editor
  add note about after_script being run separately
  Check for element before evaluate_script
  Merge branch 'master-i18n' into 'master'
  Update Prometheus gem to fix problems with other files overwriting current file
  ...
Diffstat (limited to 'spec')
-rw-r--r--  spec/bin/changelog_spec.rb | 2
-rw-r--r--  spec/controllers/application_controller_spec.rb | 14
-rw-r--r--  spec/controllers/concerns/group_tree_spec.rb | 89
-rw-r--r--  spec/controllers/dashboard/groups_controller_spec.rb | 23
-rw-r--r--  spec/controllers/explore/groups_controller_spec.rb | 23
-rw-r--r--  spec/controllers/groups/children_controller_spec.rb | 286
-rw-r--r--  spec/controllers/groups_controller_spec.rb | 133
-rw-r--r--  spec/controllers/profiles_controller_spec.rb | 44
-rw-r--r--  spec/controllers/projects/commits_controller_spec.rb | 31
-rw-r--r--  spec/controllers/projects/issues_controller_spec.rb | 57
-rw-r--r--  spec/controllers/projects/merge_requests/conflicts_controller_spec.rb | 8
-rw-r--r--  spec/controllers/projects/pipelines_controller_spec.rb | 32
-rw-r--r--  spec/controllers/projects_controller_spec.rb | 8
-rw-r--r--  spec/features/admin/admin_health_check_spec.rb | 4
-rw-r--r--  spec/features/boards/boards_spec.rb | 4
-rw-r--r--  spec/features/boards/sidebar_spec.rb | 32
-rw-r--r--  spec/features/ci_lint_spec.rb | 1
-rw-r--r--  spec/features/dashboard/groups_list_spec.rb | 81
-rw-r--r--  spec/features/dashboard/issues_spec.rb | 4
-rw-r--r--  spec/features/explore/groups_list_spec.rb | 13
-rw-r--r--  spec/features/groups/show_spec.rb | 31
-rw-r--r--  spec/features/groups_spec.rb | 23
-rw-r--r--  spec/features/issues/issue_detail_spec.rb | 2
-rw-r--r--  spec/features/issues/markdown_toolbar_spec.rb | 2
-rw-r--r--  spec/features/merge_requests/conflicts_spec.rb | 3
-rw-r--r--  spec/features/merge_requests/diff_notes_resolve_spec.rb | 29
-rw-r--r--  spec/features/merge_requests/edit_mr_spec.rb | 1
-rw-r--r--  spec/features/merge_requests/mini_pipeline_graph_spec.rb | 2
-rw-r--r--  spec/features/projects/blobs/edit_spec.rb | 1
-rw-r--r--  spec/features/projects/issuable_templates_spec.rb | 4
-rw-r--r--  spec/features/projects/user_creates_files_spec.rb | 6
-rw-r--r--  spec/features/projects/user_edits_files_spec.rb | 7
-rw-r--r--  spec/features/projects/wiki/markdown_preview_spec.rb | 1
-rw-r--r--  spec/features/projects/wiki/shortcuts_spec.rb | 4
-rw-r--r--  spec/features/projects/wiki/user_git_access_wiki_page_spec.rb | 9
-rw-r--r--  spec/features/projects/wiki/user_views_wiki_in_project_page_spec.rb | 7
-rw-r--r--  spec/features/projects_spec.rb | 2
-rw-r--r--  spec/finders/group_descendants_finder_spec.rb | 166
-rw-r--r--  spec/fixtures/api/schemas/registry/tag.json | 5
-rw-r--r--  spec/fixtures/pages.tar.gz | bin 1795 -> 1884 bytes
-rw-r--r--  spec/fixtures/pages.zip | bin 1851 -> 2338 bytes
-rw-r--r--  spec/helpers/application_helper_spec.rb | 8
-rw-r--r--  spec/initializers/settings_spec.rb | 20
-rw-r--r--  spec/javascripts/blob/blob_file_dropzone_spec.js | 1
-rw-r--r--  spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js | 23
-rw-r--r--  spec/javascripts/flash_spec.js | 21
-rw-r--r--  spec/javascripts/groups/components/app_spec.js | 443
-rw-r--r--  spec/javascripts/groups/components/group_folder_spec.js | 66
-rw-r--r--  spec/javascripts/groups/components/group_item_spec.js | 177
-rw-r--r--  spec/javascripts/groups/components/groups_spec.js | 70
-rw-r--r--  spec/javascripts/groups/components/item_actions_spec.js | 110
-rw-r--r--  spec/javascripts/groups/components/item_caret_spec.js | 40
-rw-r--r--  spec/javascripts/groups/components/item_stats_spec.js | 159
-rw-r--r--  spec/javascripts/groups/components/item_type_icon_spec.js | 54
-rw-r--r--  spec/javascripts/groups/group_item_spec.js | 102
-rw-r--r--  spec/javascripts/groups/groups_spec.js | 99
-rw-r--r--  spec/javascripts/groups/mock_data.js | 470
-rw-r--r--  spec/javascripts/groups/service/groups_service_spec.js | 42
-rw-r--r--  spec/javascripts/groups/store/groups_store_spec.js | 110
-rw-r--r--  spec/javascripts/helpers/set_timeout_promise_helper.js | 3
-rw-r--r--  spec/javascripts/issue_show/components/app_spec.js | 11
-rw-r--r--  spec/javascripts/issue_show/components/title_spec.js | 39
-rw-r--r--  spec/javascripts/jobs/header_spec.js | 7
-rw-r--r--  spec/javascripts/lib/utils/common_utils_spec.js | 26
-rw-r--r--  spec/javascripts/lib/utils/datefix_spec.js | 29
-rw-r--r--  spec/javascripts/registry/mock_data.js | 8
-rw-r--r--  spec/javascripts/repo/components/repo_commit_section_spec.js | 172
-rw-r--r--  spec/javascripts/repo/components/repo_edit_button_spec.js | 12
-rw-r--r--  spec/javascripts/repo/components/repo_editor_spec.js | 5
-rw-r--r--  spec/javascripts/repo/components/repo_file_buttons_spec.js | 4
-rw-r--r--  spec/javascripts/repo/components/repo_file_options_spec.js | 33
-rw-r--r--  spec/javascripts/repo/components/repo_file_spec.js | 125
-rw-r--r--  spec/javascripts/repo/components/repo_loading_file_spec.js | 29
-rw-r--r--  spec/javascripts/repo/components/repo_prev_directory_spec.js | 16
-rw-r--r--  spec/javascripts/repo/components/repo_sidebar_spec.js | 132
-rw-r--r--  spec/javascripts/repo/components/repo_tab_spec.js | 40
-rw-r--r--  spec/javascripts/repo/components/repo_tabs_spec.js | 18
-rw-r--r--  spec/javascripts/repo/mock_data.js | 14
-rw-r--r--  spec/javascripts/shortcuts_issuable_spec.js | 4
-rw-r--r--  spec/javascripts/shortcuts_spec.js | 11
-rw-r--r--  spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js | 84
-rw-r--r--  spec/javascripts/vue_shared/components/user_avatar/user_avatar_link_spec.js (renamed from spec/javascripts/vue_shared/components/user_avatar_link_spec.js) | 0
-rw-r--r--  spec/javascripts/vue_shared/components/user_avatar/user_avatar_svg_spec.js (renamed from spec/javascripts/vue_shared/components/user_avatar_svg_spec.js) | 0
-rw-r--r--  spec/javascripts/vue_shared/components/user_avatar_image_spec.js | 54
-rw-r--r--  spec/javascripts/zen_mode_spec.js | 3
-rw-r--r--  spec/lib/banzai/filter/sanitization_filter_spec.rb | 5
-rw-r--r--  spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb | 21
-rw-r--r--  spec/lib/gitlab/backup/manager_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/conflict/file_collection_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/conflict/file_spec.rb | 18
-rw-r--r--  spec/lib/gitlab/current_settings_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/diff/parser_spec.rb | 17
-rw-r--r--  spec/lib/gitlab/diff/position_spec.rb | 20
-rw-r--r--  spec/lib/gitlab/encoding_helper_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/git/conflict/parser_spec.rb (renamed from spec/lib/gitlab/conflict/parser_spec.rb) | 68
-rw-r--r--  spec/lib/gitlab/git/env_spec.rb | 42
-rw-r--r--  spec/lib/gitlab/git/popen_spec.rb | 132
-rw-r--r--  spec/lib/gitlab/git/repository_spec.rb | 39
-rw-r--r--  spec/lib/gitlab/git/storage/circuit_breaker_spec.rb | 58
-rw-r--r--  spec/lib/gitlab/git/storage/health_spec.rb | 30
-rw-r--r--  spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb | 4
-rw-r--r--  spec/lib/gitlab/github_import/wiki_formatter_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/group_hierarchy_spec.rb | 28
-rw-r--r--  spec/lib/gitlab/import_export/safe_model_attributes.yml | 1
-rw-r--r--  spec/lib/gitlab/multi_collection_paginator_spec.rb | 46
-rw-r--r--  spec/lib/gitlab/path_regex_spec.rb | 14
-rw-r--r--  spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb | 81
-rw-r--r--  spec/lib/gitlab/saml/auth_hash_spec.rb | 40
-rw-r--r--  spec/lib/gitlab/saml/user_spec.rb | 32
-rw-r--r--  spec/lib/gitlab/sidekiq_status_spec.rb | 12
-rw-r--r--  spec/lib/gitlab/sql/union_spec.rb | 7
-rw-r--r--  spec/lib/gitlab/usage_data_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/utils/merge_hash_spec.rb | 33
-rw-r--r--  spec/migrations/migrate_user_project_view_spec.rb | 7
-rw-r--r--  spec/models/application_setting_spec.rb | 38
-rw-r--r--  spec/models/blob_viewer/readme_spec.rb | 2
-rw-r--r--  spec/models/ci/artifact_blob_spec.rb | 7
-rw-r--r--  spec/models/ci/build_spec.rb | 31
-rw-r--r--  spec/models/concerns/group_descendant_spec.rb | 166
-rw-r--r--  spec/models/concerns/loaded_in_group_list_spec.rb | 49
-rw-r--r--  spec/models/diff_note_spec.rb | 2
-rw-r--r--  spec/models/gcp/cluster_spec.rb | 24
-rw-r--r--  spec/models/merge_request_spec.rb | 22
-rw-r--r--  spec/models/namespace_spec.rb | 28
-rw-r--r--  spec/models/project_services/microsoft_teams_service_spec.rb | 8
-rw-r--r--  spec/models/project_spec.rb | 21
-rw-r--r--  spec/models/project_wiki_spec.rb | 58
-rw-r--r--  spec/models/repository_spec.rb | 30
-rw-r--r--  spec/requests/api/branches_spec.rb | 36
-rw-r--r--  spec/requests/api/settings_spec.rb | 5
-rw-r--r--  spec/requests/api/v3/repositories_spec.rb | 11
-rw-r--r--  spec/rubocop/cop/rspec/env_assignment_spec.rb | 59
-rw-r--r--  spec/serializers/container_tag_entity_spec.rb | 2
-rw-r--r--  spec/serializers/group_child_entity_spec.rb | 101
-rw-r--r--  spec/serializers/group_child_serializer_spec.rb | 110
-rw-r--r--  spec/services/auth/container_registry_authentication_service_spec.rb | 50
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb | 3
-rw-r--r--  spec/services/merge_requests/conflicts/list_service_spec.rb | 2
-rw-r--r--  spec/services/merge_requests/conflicts/resolve_service_spec.rb | 33
-rw-r--r--  spec/services/projects/destroy_service_spec.rb | 17
-rw-r--r--  spec/services/projects/update_pages_service_spec.rb | 5
-rw-r--r--  spec/services/quick_actions/interpret_service_spec.rb | 56
-rw-r--r--  spec/services/system_note_service_spec.rb | 14
-rw-r--r--  spec/support/board_helpers.rb | 16
-rw-r--r--  spec/support/features/discussion_comments_shared_example.rb | 29
-rw-r--r--  spec/support/ldap_helpers.rb | 5
-rw-r--r--  spec/support/login_helpers.rb | 12
-rw-r--r--  spec/support/redis_without_keys.rb | 8
-rw-r--r--  spec/support/select2_helper.rb | 1
-rw-r--r--  spec/support/shared_examples/requests/api/status_shared_examples.rb | 7
-rw-r--r--  spec/support/slack_mattermost_notifications_shared_examples.rb | 3
-rw-r--r--  spec/support/stub_configuration.rb | 4
-rw-r--r--  spec/tasks/gitlab/backup_rake_spec.rb | 215
-rw-r--r--  spec/tasks/gitlab/gitaly_rake_spec.rb | 9
-rw-r--r--  spec/tasks/gitlab/ldap_rake_spec.rb | 2
155 files changed, 4845 insertions(+), 1327 deletions(-)
diff --git a/spec/bin/changelog_spec.rb b/spec/bin/changelog_spec.rb
index 6d8b9865dcb..fc1bf67d7b9 100644
--- a/spec/bin/changelog_spec.rb
+++ b/spec/bin/changelog_spec.rb
@@ -84,7 +84,7 @@ describe 'bin/changelog' do
expect do
expect do
expect { described_class.read_type }.to raise_error(SystemExit)
- end.to output("Invalid category index, please select an index between 1 and 7\n").to_stderr
+ end.to output("Invalid category index, please select an index between 1 and 8\n").to_stderr
end.to output.to_stdout
end
end
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 59a6cfbf4f5..0a3a0f7da18 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -221,6 +221,20 @@ describe ApplicationController do
end
end
+ describe '#set_page_title_header' do
+ let(:controller) { described_class.new }
+
+ it 'URI encodes UTF-8 characters in the title' do
+ response = double(headers: {})
+ allow_any_instance_of(PageLayoutHelper).to receive(:page_title).and_return('€100 · GitLab')
+ allow(controller).to receive(:response).and_return(response)
+
+ controller.send(:set_page_title_header)
+
+ expect(response.headers['Page-Title']).to eq('%E2%82%AC100%20%C2%B7%20GitLab')
+ end
+ end
+
context 'two-factor authentication' do
let(:controller) { described_class.new }
diff --git a/spec/controllers/concerns/group_tree_spec.rb b/spec/controllers/concerns/group_tree_spec.rb
new file mode 100644
index 00000000000..ba84fbf8564
--- /dev/null
+++ b/spec/controllers/concerns/group_tree_spec.rb
@@ -0,0 +1,89 @@
+require 'spec_helper'
+
+describe GroupTree do
+ let(:group) { create(:group, :public) }
+ let(:user) { create(:user) }
+
+ controller(ApplicationController) do
+ # `described_class` is not available in this context
+ include GroupTree # rubocop:disable RSpec/DescribedClass
+
+ def index
+ render_group_tree GroupsFinder.new(current_user).execute
+ end
+ end
+
+ before do
+ group.add_owner(user)
+ sign_in(user)
+ end
+
+ describe 'GET #index' do
+ it 'filters groups' do
+ other_group = create(:group, name: 'filter')
+ other_group.add_owner(user)
+
+ get :index, filter: 'filt', format: :json
+
+ expect(assigns(:groups)).to contain_exactly(other_group)
+ end
+
+ context 'for subgroups', :nested_groups do
+ it 'only renders root groups when no parent was given' do
+ create(:group, :public, parent: group)
+
+ get :index, format: :json
+
+ expect(assigns(:groups)).to contain_exactly(group)
+ end
+
+ it 'contains only the subgroup when a parent was given' do
+ subgroup = create(:group, :public, parent: group)
+
+ get :index, parent_id: group.id, format: :json
+
+ expect(assigns(:groups)).to contain_exactly(subgroup)
+ end
+
+ it 'allows filtering for subgroups and includes the parents for rendering' do
+ subgroup = create(:group, :public, parent: group, name: 'filter')
+
+ get :index, filter: 'filt', format: :json
+
+ expect(assigns(:groups)).to contain_exactly(group, subgroup)
+ end
+
+ it 'does not include groups the user does not have access to' do
+ parent = create(:group, :private)
+ subgroup = create(:group, :private, parent: parent, name: 'filter')
+ subgroup.add_developer(user)
+ _other_subgroup = create(:group, :private, parent: parent, name: 'filte')
+
+ get :index, filter: 'filt', format: :json
+
+ expect(assigns(:groups)).to contain_exactly(parent, subgroup)
+ end
+ end
+
+ context 'json content' do
+ it 'shows groups as json' do
+ get :index, format: :json
+
+ expect(json_response.first['id']).to eq(group.id)
+ end
+
+ context 'nested groups', :nested_groups do
+ it 'expands the tree when filtering' do
+ subgroup = create(:group, :public, parent: group, name: 'filter')
+
+ get :index, filter: 'filt', format: :json
+
+ children_response = json_response.first['children']
+
+ expect(json_response.first['id']).to eq(group.id)
+ expect(children_response.first['id']).to eq(subgroup.id)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/dashboard/groups_controller_spec.rb b/spec/controllers/dashboard/groups_controller_spec.rb
new file mode 100644
index 00000000000..fb9d3efbac0
--- /dev/null
+++ b/spec/controllers/dashboard/groups_controller_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe Dashboard::GroupsController do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'renders group trees' do
+ expect(described_class).to include(GroupTree)
+ end
+
+ it 'only includes projects the user is a member of' do
+ member_of_group = create(:group)
+ member_of_group.add_developer(user)
+ create(:group, :public)
+
+ get :index
+
+ expect(assigns(:groups)).to contain_exactly(member_of_group)
+ end
+end
diff --git a/spec/controllers/explore/groups_controller_spec.rb b/spec/controllers/explore/groups_controller_spec.rb
new file mode 100644
index 00000000000..9e0ad9ea86f
--- /dev/null
+++ b/spec/controllers/explore/groups_controller_spec.rb
@@ -0,0 +1,23 @@
+require 'spec_helper'
+
+describe Explore::GroupsController do
+ let(:user) { create(:user) }
+
+ before do
+ sign_in(user)
+ end
+
+ it 'renders group trees' do
+ expect(described_class).to include(GroupTree)
+ end
+
+ it 'includes public projects' do
+ member_of_group = create(:group)
+ member_of_group.add_developer(user)
+ public_group = create(:group, :public)
+
+ get :index
+
+ expect(assigns(:groups)).to contain_exactly(member_of_group, public_group)
+ end
+end
diff --git a/spec/controllers/groups/children_controller_spec.rb b/spec/controllers/groups/children_controller_spec.rb
new file mode 100644
index 00000000000..4262d474e59
--- /dev/null
+++ b/spec/controllers/groups/children_controller_spec.rb
@@ -0,0 +1,286 @@
+require 'spec_helper'
+
+describe Groups::ChildrenController do
+ let(:group) { create(:group, :public) }
+ let(:user) { create(:user) }
+ let!(:group_member) { create(:group_member, group: group, user: user) }
+
+ describe 'GET #index' do
+ context 'for projects' do
+ let!(:public_project) { create(:project, :public, namespace: group) }
+ let!(:private_project) { create(:project, :private, namespace: group) }
+
+ context 'as a user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'shows all children' do
+ get :index, group_id: group.to_param, format: :json
+
+ expect(assigns(:children)).to contain_exactly(public_project, private_project)
+ end
+
+ context 'being member of private subgroup' do
+ it 'shows public and private children the user is member of' do
+ group_member.destroy!
+ private_project.add_guest(user)
+
+ get :index, group_id: group.to_param, format: :json
+
+ expect(assigns(:children)).to contain_exactly(public_project, private_project)
+ end
+ end
+ end
+
+ context 'as a guest' do
+ it 'shows the public children' do
+ get :index, group_id: group.to_param, format: :json
+
+ expect(assigns(:children)).to contain_exactly(public_project)
+ end
+ end
+ end
+
+ context 'for subgroups', :nested_groups do
+ let!(:public_subgroup) { create(:group, :public, parent: group) }
+ let!(:private_subgroup) { create(:group, :private, parent: group) }
+ let!(:public_project) { create(:project, :public, namespace: group) }
+ let!(:private_project) { create(:project, :private, namespace: group) }
+
+ context 'as a user' do
+ before do
+ sign_in(user)
+ end
+
+ it 'shows all children' do
+ get :index, group_id: group.to_param, format: :json
+
+ expect(assigns(:children)).to contain_exactly(public_subgroup, private_subgroup, public_project, private_project)
+ end
+
+ context 'being member of private subgroup' do
+ it 'shows public and private children the user is member of' do
+ group_member.destroy!
+ private_subgroup.add_guest(user)
+ private_project.add_guest(user)
+
+ get :index, group_id: group.to_param, format: :json
+
+ expect(assigns(:children)).to contain_exactly(public_subgroup, private_subgroup, public_project, private_project)
+ end
+ end
+ end
+
+ context 'as a guest' do
+ it 'shows the public children' do
+ get :index, group_id: group.to_param, format: :json
+
+ expect(assigns(:children)).to contain_exactly(public_subgroup, public_project)
+ end
+ end
+
+ context 'filtering children' do
+ it 'expands the tree for matching projects' do
+ project = create(:project, :public, namespace: public_subgroup, name: 'filterme')
+
+ get :index, group_id: group.to_param, filter: 'filter', format: :json
+
+ group_json = json_response.first
+ project_json = group_json['children'].first
+
+ expect(group_json['id']).to eq(public_subgroup.id)
+ expect(project_json['id']).to eq(project.id)
+ end
+
+ it 'expands the tree for matching subgroups' do
+ matched_group = create(:group, :public, parent: public_subgroup, name: 'filterme')
+
+ get :index, group_id: group.to_param, filter: 'filter', format: :json
+
+ group_json = json_response.first
+ matched_group_json = group_json['children'].first
+
+ expect(group_json['id']).to eq(public_subgroup.id)
+ expect(matched_group_json['id']).to eq(matched_group.id)
+ end
+
+ it 'merges the trees correctly' do
+ shared_subgroup = create(:group, :public, parent: group, path: 'hardware')
+ matched_project_1 = create(:project, :public, namespace: shared_subgroup, name: 'mobile-soc')
+
+ l2_subgroup = create(:group, :public, parent: shared_subgroup, path: 'broadcom')
+ l3_subgroup = create(:group, :public, parent: l2_subgroup, path: 'wifi-group')
+ matched_project_2 = create(:project, :public, namespace: l3_subgroup, name: 'mobile')
+
+ get :index, group_id: group.to_param, filter: 'mobile', format: :json
+
+ shared_group_json = json_response.first
+ expect(shared_group_json['id']).to eq(shared_subgroup.id)
+
+ matched_project_1_json = shared_group_json['children'].detect { |child| child['type'] == 'project' }
+ expect(matched_project_1_json['id']).to eq(matched_project_1.id)
+
+ l2_subgroup_json = shared_group_json['children'].detect { |child| child['type'] == 'group' }
+ expect(l2_subgroup_json['id']).to eq(l2_subgroup.id)
+
+ l3_subgroup_json = l2_subgroup_json['children'].first
+ expect(l3_subgroup_json['id']).to eq(l3_subgroup.id)
+
+ matched_project_2_json = l3_subgroup_json['children'].first
+ expect(matched_project_2_json['id']).to eq(matched_project_2.id)
+ end
+
+ it 'expands the tree upto a specified parent' do
+ subgroup = create(:group, :public, parent: group)
+ l2_subgroup = create(:group, :public, parent: subgroup)
+ create(:project, :public, namespace: l2_subgroup, name: 'test')
+
+ get :index, group_id: subgroup.to_param, filter: 'test', format: :json
+
+ expect(response).to have_http_status(200)
+ end
+
+ it 'returns an array with one element when only one result is matched' do
+ create(:project, :public, namespace: group, name: 'match')
+
+ get :index, group_id: group.to_param, filter: 'match', format: :json
+
+ expect(json_response).to be_kind_of(Array)
+ expect(json_response.size).to eq(1)
+ end
+
+ it 'returns an empty array when there are no search results' do
+ subgroup = create(:group, :public, parent: group)
+ l2_subgroup = create(:group, :public, parent: subgroup)
+ create(:project, :public, namespace: l2_subgroup, name: 'no-match')
+
+ get :index, group_id: subgroup.to_param, filter: 'test', format: :json
+
+ expect(json_response).to eq([])
+ end
+
+ it 'includes pagination headers' do
+ 2.times { |i| create(:group, :public, parent: public_subgroup, name: "filterme#{i}") }
+
+ get :index, group_id: group.to_param, filter: 'filter', per_page: 1, format: :json
+
+ expect(response).to include_pagination_headers
+ end
+ end
+
+ context 'queries per rendered element', :request_store do
+ # We need to make sure the following counts are preloaded
+ # otherwise they will cause an extra query
+ # 1. Count of visible projects in the element
+ # 2. Count of visible subgroups in the element
+ # 3. Count of members of a group
+ let(:expected_queries_per_group) { 0 }
+ let(:expected_queries_per_project) { 0 }
+
+ def get_list
+ get :index, group_id: group.to_param, format: :json
+ end
+
+ it 'queries the expected amount for a group row' do
+ control = ActiveRecord::QueryRecorder.new { get_list }
+
+ _new_group = create(:group, :public, parent: group)
+
+ expect { get_list }.not_to exceed_query_limit(control).with_threshold(expected_queries_per_group)
+ end
+
+ it 'queries the expected amount for a project row' do
+ control = ActiveRecord::QueryRecorder.new { get_list }
+ _new_project = create(:project, :public, namespace: group)
+
+ expect { get_list }.not_to exceed_query_limit(control).with_threshold(expected_queries_per_project)
+ end
+
+ context 'when rendering hierarchies' do
+ # When loading hierarchies we load the all the ancestors for matched projects
+ # in 1 separate query
+ let(:extra_queries_for_hierarchies) { 1 }
+
+ def get_filtered_list
+ get :index, group_id: group.to_param, filter: 'filter', format: :json
+ end
+
+ it 'queries the expected amount when nested rows are increased for a group' do
+ matched_group = create(:group, :public, parent: group, name: 'filterme')
+
+ control = ActiveRecord::QueryRecorder.new { get_filtered_list }
+
+ matched_group.update!(parent: public_subgroup)
+
+ expect { get_filtered_list }.not_to exceed_query_limit(control).with_threshold(extra_queries_for_hierarchies)
+ end
+
+ it 'queries the expected amount when a new group match is added' do
+ create(:group, :public, parent: public_subgroup, name: 'filterme')
+
+ control = ActiveRecord::QueryRecorder.new { get_filtered_list }
+
+ create(:group, :public, parent: public_subgroup, name: 'filterme2')
+ create(:group, :public, parent: public_subgroup, name: 'filterme3')
+
+ expect { get_filtered_list }.not_to exceed_query_limit(control).with_threshold(extra_queries_for_hierarchies)
+ end
+
+ it 'queries the expected amount when nested rows are increased for a project' do
+ matched_project = create(:project, :public, namespace: group, name: 'filterme')
+
+ control = ActiveRecord::QueryRecorder.new { get_filtered_list }
+
+ matched_project.update!(namespace: public_subgroup)
+
+ expect { get_filtered_list }.not_to exceed_query_limit(control).with_threshold(extra_queries_for_hierarchies)
+ end
+ end
+ end
+ end
+
+ context 'pagination' do
+ let(:per_page) { 3 }
+
+ before do
+ allow(Kaminari.config).to receive(:default_per_page).and_return(per_page)
+ end
+
+ context 'with only projects' do
+ let!(:other_project) { create(:project, :public, namespace: group) }
+ let!(:first_page_projects) { create_list(:project, per_page, :public, namespace: group ) }
+
+ it 'has projects on the first page' do
+ get :index, group_id: group.to_param, sort: 'id_desc', format: :json
+
+ expect(assigns(:children)).to contain_exactly(*first_page_projects)
+ end
+
+ it 'has projects on the second page' do
+ get :index, group_id: group.to_param, sort: 'id_desc', page: 2, format: :json
+
+ expect(assigns(:children)).to contain_exactly(other_project)
+ end
+ end
+
+ context 'with subgroups and projects', :nested_groups do
+ let!(:first_page_subgroups) { create_list(:group, per_page, :public, parent: group) }
+ let!(:other_subgroup) { create(:group, :public, parent: group) }
+ let!(:next_page_projects) { create_list(:project, per_page, :public, namespace: group) }
+
+ it 'contains all subgroups' do
+ get :index, group_id: group.to_param, sort: 'id_asc', format: :json
+
+ expect(assigns(:children)).to contain_exactly(*first_page_subgroups)
+ end
+
+ it 'contains the project and group on the second page' do
+ get :index, group_id: group.to_param, sort: 'id_asc', page: 2, format: :json
+
+ expect(assigns(:children)).to contain_exactly(other_subgroup, *next_page_projects.take(per_page - 1))
+ end
+ end
+ end
+ end
+end
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index b0564e27a68..5036c1d2226 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -1,4 +1,4 @@
-require 'rails_helper'
+require 'spec_helper'
describe GroupsController do
let(:user) { create(:user) }
@@ -32,6 +32,31 @@ describe GroupsController do
end
end
+ describe 'GET #show' do
+ before do
+ sign_in(user)
+ project
+ end
+
+ context 'as html' do
+ it 'assigns whether or not a group has children' do
+ get :show, id: group.to_param
+
+ expect(assigns(:has_children)).to be_truthy
+ end
+ end
+
+ context 'as atom' do
+ it 'assigns events for all the projects in the group' do
+ create(:event, project: project)
+
+ get :show, id: group.to_param, format: :atom
+
+ expect(assigns(:events)).not_to be_empty
+ end
+ end
+ end
+
describe 'GET #new' do
context 'when creating subgroups', :nested_groups do
[true, false].each do |can_create_group_status|
@@ -150,42 +175,6 @@ describe GroupsController do
end
end
- describe 'GET #subgroups', :nested_groups do
- let!(:public_subgroup) { create(:group, :public, parent: group) }
- let!(:private_subgroup) { create(:group, :private, parent: group) }
-
- context 'as a user' do
- before do
- sign_in(user)
- end
-
- it 'shows all subgroups' do
- get :subgroups, id: group.to_param
-
- expect(assigns(:nested_groups)).to contain_exactly(public_subgroup, private_subgroup)
- end
-
- context 'being member of private subgroup' do
- it 'shows public and private subgroups the user is member of' do
- group_member.destroy!
- private_subgroup.add_guest(user)
-
- get :subgroups, id: group.to_param
-
- expect(assigns(:nested_groups)).to contain_exactly(public_subgroup, private_subgroup)
- end
- end
- end
-
- context 'as a guest' do
- it 'shows the public subgroups' do
- get :subgroups, id: group.to_param
-
- expect(assigns(:nested_groups)).to contain_exactly(public_subgroup)
- end
- end
- end
-
describe 'GET #issues' do
let(:issue_1) { create(:issue, project: project) }
let(:issue_2) { create(:issue, project: project) }
@@ -425,62 +414,62 @@ describe GroupsController do
end
end
end
- end
- context 'for a POST request' do
- context 'when requesting the canonical path with different casing' do
- it 'does not 404' do
- post :update, id: group.to_param.upcase, group: { path: 'new_path' }
+ context 'for a POST request' do
+ context 'when requesting the canonical path with different casing' do
+ it 'does not 404' do
+ post :update, id: group.to_param.upcase, group: { path: 'new_path' }
- expect(response).not_to have_http_status(404)
- end
+ expect(response).not_to have_http_status(404)
+ end
- it 'does not redirect to the correct casing' do
- post :update, id: group.to_param.upcase, group: { path: 'new_path' }
+ it 'does not redirect to the correct casing' do
+ post :update, id: group.to_param.upcase, group: { path: 'new_path' }
- expect(response).not_to have_http_status(301)
+ expect(response).not_to have_http_status(301)
+ end
end
- end
- context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
- it 'returns not found' do
- post :update, id: redirect_route.path, group: { path: 'new_path' }
+ it 'returns not found' do
+ post :update, id: redirect_route.path, group: { path: 'new_path' }
- expect(response).to have_http_status(404)
+ expect(response).to have_http_status(404)
+ end
end
end
- end
- context 'for a DELETE request' do
- context 'when requesting the canonical path with different casing' do
- it 'does not 404' do
- delete :destroy, id: group.to_param.upcase
+ context 'for a DELETE request' do
+ context 'when requesting the canonical path with different casing' do
+ it 'does not 404' do
+ delete :destroy, id: group.to_param.upcase
- expect(response).not_to have_http_status(404)
- end
+ expect(response).not_to have_http_status(404)
+ end
- it 'does not redirect to the correct casing' do
- delete :destroy, id: group.to_param.upcase
+ it 'does not redirect to the correct casing' do
+ delete :destroy, id: group.to_param.upcase
- expect(response).not_to have_http_status(301)
+ expect(response).not_to have_http_status(301)
+ end
end
- end
- context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+ context 'when requesting a redirected path' do
+ let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
- it 'returns not found' do
- delete :destroy, id: redirect_route.path
+ it 'returns not found' do
+ delete :destroy, id: redirect_route.path
- expect(response).to have_http_status(404)
+ expect(response).to have_http_status(404)
+ end
end
end
end
- end
- def group_moved_message(redirect_route, group)
- "Group '#{redirect_route.path}' was moved to '#{group.full_path}'. Please update any links and bookmarks that may still have the old path."
+ def group_moved_message(redirect_route, group)
+ "Group '#{redirect_route.path}' was moved to '#{group.full_path}'. Please update any links and bookmarks that may still have the old path."
+ end
end
end
diff --git a/spec/controllers/profiles_controller_spec.rb b/spec/controllers/profiles_controller_spec.rb
index ce5040ff02b..d380978b86e 100644
--- a/spec/controllers/profiles_controller_spec.rb
+++ b/spec/controllers/profiles_controller_spec.rb
@@ -1,9 +1,10 @@
require('spec_helper')
-describe ProfilesController do
- describe "PUT update" do
- it "allows an email update from a user without an external email address" do
- user = create(:user)
+describe ProfilesController, :request_store do
+ let(:user) { create(:user) }
+
+ describe 'PUT update' do
+ it 'allows an email update from a user without an external email address' do
sign_in(user)
put :update,
@@ -29,7 +30,7 @@ describe ProfilesController do
expect(user.unconfirmed_email).to eq nil
end
- it "ignores an email update from a user with an external email address" do
+ it 'ignores an email update from a user with an external email address' do
stub_omniauth_setting(sync_profile_from_provider: ['ldap'])
stub_omniauth_setting(sync_profile_attributes: true)
@@ -46,7 +47,7 @@ describe ProfilesController do
expect(ldap_user.unconfirmed_email).not_to eq('john@gmail.com')
end
- it "ignores an email and name update but allows a location update from a user with external email and name, but not external location" do
+ it 'ignores an email and name update but allows a location update from a user with external email and name, but not external location' do
stub_omniauth_setting(sync_profile_from_provider: ['ldap'])
stub_omniauth_setting(sync_profile_attributes: true)
@@ -65,4 +66,35 @@ describe ProfilesController do
expect(ldap_user.location).to eq('City, Country')
end
end
+
+ describe 'PUT update_username' do
+ let(:namespace) { user.namespace }
+ let(:project) { create(:project_empty_repo, namespace: namespace) }
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:new_username) { 'renamedtosomethingelse' }
+
+ it 'allows username change' do
+ sign_in(user)
+
+ put :update_username,
+ user: { username: new_username }
+
+ user.reload
+
+ expect(response.status).to eq(302)
+ expect(user.username).to eq(new_username)
+ end
+
+ it 'moves dependent projects to new namespace' do
+ sign_in(user)
+
+ put :update_username,
+ user: { username: new_username }
+
+ user.reload
+
+ expect(response.status).to eq(302)
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{new_username}/#{project.path}.git")).to be_truthy
+ end
+ end
end
diff --git a/spec/controllers/projects/commits_controller_spec.rb b/spec/controllers/projects/commits_controller_spec.rb
index e26731fb691..c459d732507 100644
--- a/spec/controllers/projects/commits_controller_spec.rb
+++ b/spec/controllers/projects/commits_controller_spec.rb
@@ -10,9 +10,36 @@ describe Projects::CommitsController do
end
describe "GET show" do
- context "when the ref name ends in .atom" do
- render_views
+ render_views
+
+ context 'with file path' do
+ before do
+ get(:show,
+ namespace_id: project.namespace,
+ project_id: project,
+ id: id)
+ end
+
+ context "valid branch, valid file" do
+ let(:id) { 'master/README.md' }
+
+ it { is_expected.to respond_with(:success) }
+ end
+
+ context "valid branch, invalid file" do
+ let(:id) { 'master/invalid-path.rb' }
+ it { is_expected.to respond_with(:not_found) }
+ end
+
+ context "invalid branch, valid file" do
+ let(:id) { 'invalid-branch/README.md' }
+
+ it { is_expected.to respond_with(:not_found) }
+ end
+ end
+
+ context "when the ref name ends in .atom" do
context "when the ref does not exist with the suffix" do
it "renders as atom" do
get(:show,
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 053bd73fee3..ed8088a46f0 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -850,47 +850,48 @@ describe Projects::IssuesController do
describe 'GET #discussions' do
let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
-
- before do
- project.add_developer(user)
- sign_in(user)
- end
-
- it 'returns discussion json' do
- get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid
-
- expect(JSON.parse(response.body).first.keys).to match_array(%w[id reply_id expanded notes individual_note])
- end
-
- context 'with cross-reference system note', :request_store do
- let(:new_issue) { create(:issue) }
- let(:cross_reference) { "mentioned in #{new_issue.to_reference(issue.project)}" }
-
+ context 'when authenticated' do
before do
- create(:discussion_note_on_issue, :system, noteable: issue, project: issue.project, note: cross_reference)
+ project.add_developer(user)
+ sign_in(user)
end
- it 'filters notes that the user should not see' do
+ it 'returns discussion json' do
get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid
- expect(JSON.parse(response.body).count).to eq(1)
+ expect(json_response.first.keys).to match_array(%w[id reply_id expanded notes individual_note])
end
- it 'does not result in N+1 queries' do
- # Instantiate the controller variables to ensure QueryRecorder has an accurate base count
- get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid
+ context 'with cross-reference system note', :request_store do
+ let(:new_issue) { create(:issue) }
+ let(:cross_reference) { "mentioned in #{new_issue.to_reference(issue.project)}" }
- RequestStore.clear!
+ before do
+ create(:discussion_note_on_issue, :system, noteable: issue, project: issue.project, note: cross_reference)
+ end
- control_count = ActiveRecord::QueryRecorder.new do
+ it 'filters notes that the user should not see' do
get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid
- end.count
- RequestStore.clear!
+ expect(JSON.parse(response.body).count).to eq(1)
+ end
- create_list(:discussion_note_on_issue, 2, :system, noteable: issue, project: issue.project, note: cross_reference)
+ it 'does not result in N+1 queries' do
+ # Instantiate the controller variables to ensure QueryRecorder has an accurate base count
+ get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid
- expect { get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid }.not_to exceed_query_limit(control_count)
+ RequestStore.clear!
+
+ control_count = ActiveRecord::QueryRecorder.new do
+ get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid
+ end.count
+
+ RequestStore.clear!
+
+ create_list(:discussion_note_on_issue, 2, :system, noteable: issue, project: issue.project, note: cross_reference)
+
+ expect { get :discussions, namespace_id: project.namespace, project_id: project, id: issue.iid }.not_to exceed_query_limit(control_count)
+ end
end
end
end
diff --git a/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb b/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
index 393d38c6e6b..c6d50c28106 100644
--- a/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/conflicts_controller_spec.rb
@@ -17,8 +17,8 @@ describe Projects::MergeRequests::ConflictsController do
describe 'GET show' do
context 'when the conflicts cannot be resolved in the UI' do
before do
- allow_any_instance_of(Gitlab::Conflict::Parser)
- .to receive(:parse).and_raise(Gitlab::Conflict::Parser::UnmergeableFile)
+ allow(Gitlab::Git::Conflict::Parser).to receive(:parse)
+ .and_raise(Gitlab::Git::Conflict::Parser::UnmergeableFile)
get :show,
namespace_id: merge_request_with_conflicts.project.namespace.to_param,
@@ -109,8 +109,8 @@ describe Projects::MergeRequests::ConflictsController do
context 'when the conflicts cannot be resolved in the UI' do
before do
- allow_any_instance_of(Gitlab::Conflict::Parser)
- .to receive(:parse).and_raise(Gitlab::Conflict::Parser::UnmergeableFile)
+ allow(Gitlab::Git::Conflict::Parser).to receive(:parse)
+ .and_raise(Gitlab::Git::Conflict::Parser::UnmergeableFile)
conflict_for_path('files/ruby/regex.rb')
end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index 167e80ed9cd..67b53d2acce 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -3,32 +3,36 @@ require 'spec_helper'
describe Projects::PipelinesController do
include ApiHelpers
- let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
+ set(:user) { create(:user) }
+ set(:project) { create(:project, :public, :repository) }
let(:feature) { ProjectFeature::DISABLED }
before do
stub_not_protect_default_branch
project.add_developer(user)
- project.project_feature.update(
- builds_access_level: feature)
+ project.project_feature.update(builds_access_level: feature)
sign_in(user)
end
describe 'GET index.json' do
before do
- create(:ci_empty_pipeline, status: 'pending', project: project)
- create(:ci_empty_pipeline, status: 'running', project: project)
- create(:ci_empty_pipeline, status: 'created', project: project)
- create(:ci_empty_pipeline, status: 'success', project: project)
+ branch_head = project.commit
+ parent = branch_head.parent
- get :index, namespace_id: project.namespace,
- project_id: project,
- format: :json
+ create(:ci_empty_pipeline, status: 'pending', project: project, sha: branch_head.id)
+ create(:ci_empty_pipeline, status: 'running', project: project, sha: branch_head.id)
+ create(:ci_empty_pipeline, status: 'created', project: project, sha: parent.id)
+ create(:ci_empty_pipeline, status: 'success', project: project, sha: parent.id)
+ end
+
+ subject do
+ get :index, namespace_id: project.namespace, project_id: project, format: :json
end
it 'returns JSON with serialized pipelines' do
+ subject
+
expect(response).to have_http_status(:ok)
expect(response).to match_response_schema('pipeline')
@@ -39,6 +43,12 @@ describe Projects::PipelinesController do
expect(json_response['count']['pending']).to eq 1
expect(json_response['count']['finished']).to eq 1
end
+
+ context 'when performing gitaly calls', :request_store do
+ it 'limits the Gitaly requests' do
+ expect { subject }.to change { Gitlab::GitalyClient.get_request_count }.by(10)
+ end
+ end
end
describe 'GET show JSON' do
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 0544afe31ed..7569052c3aa 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -222,6 +222,14 @@ describe ProjectsController do
get :show, namespace_id: public_project.namespace, id: public_project
expect(response).to render_template('_files')
end
+
+ it "renders the readme view" do
+ allow(controller).to receive(:current_user).and_return(user)
+ allow(user).to receive(:project_view).and_return('readme')
+
+ get :show, namespace_id: public_project.namespace, id: public_project
+ expect(response).to render_template('_readme')
+ end
end
context "when the url contains .atom" do
diff --git a/spec/features/admin/admin_health_check_spec.rb b/spec/features/admin/admin_health_check_spec.rb
index 09e6965849a..4430fc15501 100644
--- a/spec/features/admin/admin_health_check_spec.rb
+++ b/spec/features/admin/admin_health_check_spec.rb
@@ -65,9 +65,11 @@ feature "Admin Health Check", :feature, :broken_storage do
it 'shows storage failure information' do
hostname = Gitlab::Environment.hostname
+ maximum_failures = Gitlab::CurrentSettings.current_application_settings
+ .circuitbreaker_failure_count_threshold
expect(page).to have_content('broken: failed storage access attempt on host:')
- expect(page).to have_content("#{hostname}: 1 of 10 failures.")
+ expect(page).to have_content("#{hostname}: 1 of #{maximum_failures} failures.")
end
it 'allows resetting storage failures' do
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index 86820a0c4a6..ea29b55de41 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -172,12 +172,14 @@ describe 'Issue Boards', :js do
expect(page).to have_selector('.card', count: 20)
expect(page).to have_content('Showing 20 of 58 issues')
+ find('.board .board-list')
evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
wait_for_requests
expect(page).to have_selector('.card', count: 40)
expect(page).to have_content('Showing 40 of 58 issues')
+ find('.board .board-list')
evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
wait_for_requests
@@ -450,11 +452,13 @@ describe 'Issue Boards', :js do
expect(page).to have_selector('.card', count: 20)
expect(page).to have_content('Showing 20 of 51 issues')
+ find('.board .board-list')
evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
expect(page).to have_selector('.card', count: 40)
expect(page).to have_content('Showing 40 of 51 issues')
+ find('.board .board-list')
evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
expect(page).to have_selector('.card', count: 51)
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index 22f6fcdf08e..9137ab82ff4 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -1,6 +1,8 @@
require 'rails_helper'
describe 'Issue Boards', :js do
+ include BoardHelpers
+
let(:user) { create(:user) }
let(:user2) { create(:user) }
let(:project) { create(:project, :public) }
@@ -309,6 +311,21 @@ describe 'Issue Boards', :js do
expect(card).to have_selector('.label', count: 1)
expect(card).not_to have_content(stretch.title)
end
+
+ it 'creates new label' do
+ click_card(card)
+
+ page.within('.labels') do
+ click_link 'Edit'
+ click_link 'Create new label'
+ fill_in 'new_label_name', with: 'test label'
+ first('.suggest-colors-dropdown a').click
+ click_button 'Create'
+ wait_for_requests
+
+ expect(page).to have_link 'test label'
+ end
+ end
end
context 'subscription' do
@@ -322,19 +339,4 @@ describe 'Issue Boards', :js do
end
end
end
-
- def click_card(card)
- page.within(card) do
- first('.card-number').click
- end
-
- wait_for_sidebar
- end
-
- def wait_for_sidebar
- # loop until the CSS transition is complete
- Timeout.timeout(0.5) do
- loop until evaluate_script('$(".right-sidebar").outerWidth()') == 290
- end
- end
end
diff --git a/spec/features/ci_lint_spec.rb b/spec/features/ci_lint_spec.rb
index 9accd7bb07c..9bc23baf6cf 100644
--- a/spec/features/ci_lint_spec.rb
+++ b/spec/features/ci_lint_spec.rb
@@ -10,6 +10,7 @@ describe 'CI Lint', :js do
visit ci_lint_path
# Ace editor updates a hidden textarea and it happens asynchronously
# `sleep 0.1` is actually needed here because of this
+ find('#ci-editor')
execute_script("ace.edit('ci-editor').setValue(" + yaml_content.to_json + ");")
sleep 0.1
click_on 'Validate'
diff --git a/spec/features/dashboard/groups_list_spec.rb b/spec/features/dashboard/groups_list_spec.rb
index 1cb78410957..d92c002b4e7 100644
--- a/spec/features/dashboard/groups_list_spec.rb
+++ b/spec/features/dashboard/groups_list_spec.rb
@@ -6,6 +6,13 @@ feature 'Dashboard Groups page', :js do
let(:nested_group) { create(:group, :nested) }
let(:another_group) { create(:group) }
+ def click_group_caret(group)
+ within("#group-#{group.id}") do
+ first('.folder-caret').click
+ end
+ wait_for_requests
+ end
+
it 'shows groups user is member of' do
group.add_owner(user)
nested_group.add_owner(user)
@@ -13,13 +20,27 @@ feature 'Dashboard Groups page', :js do
sign_in(user)
visit dashboard_groups_path
+ wait_for_requests
+
+ expect(page).to have_content(group.name)
+
+ expect(page).not_to have_content(another_group.name)
+ end
+
+ it 'shows subgroups the user is member of', :nested_groups do
+ group.add_owner(user)
+ nested_group.add_owner(user)
+
+ sign_in(user)
+ visit dashboard_groups_path
+ wait_for_requests
- expect(page).to have_content(group.full_name)
- expect(page).to have_content(nested_group.full_name)
- expect(page).not_to have_content(another_group.full_name)
+ expect(page).to have_content(nested_group.parent.name)
+ click_group_caret(nested_group.parent)
+ expect(page).to have_content(nested_group.name)
end
- describe 'when filtering groups' do
+ describe 'when filtering groups', :nested_groups do
before do
group.add_owner(user)
nested_group.add_owner(user)
@@ -30,25 +51,26 @@ feature 'Dashboard Groups page', :js do
visit dashboard_groups_path
end
- it 'filters groups' do
- fill_in 'filter_groups', with: group.name
+ it 'expands when filtering groups' do
+ fill_in 'filter', with: nested_group.name
wait_for_requests
- expect(page).to have_content(group.full_name)
- expect(page).not_to have_content(nested_group.full_name)
- expect(page).not_to have_content(another_group.full_name)
+ expect(page).not_to have_content(group.name)
+ expect(page).to have_content(nested_group.parent.name)
+ expect(page).to have_content(nested_group.name)
+ expect(page).not_to have_content(another_group.name)
end
it 'resets search when user cleans the input' do
- fill_in 'filter_groups', with: group.name
+ fill_in 'filter', with: group.name
wait_for_requests
- fill_in 'filter_groups', with: ''
+ fill_in 'filter', with: ''
wait_for_requests
- expect(page).to have_content(group.full_name)
- expect(page).to have_content(nested_group.full_name)
- expect(page).not_to have_content(another_group.full_name)
+ expect(page).to have_content(group.name)
+ expect(page).to have_content(nested_group.parent.name)
+ expect(page).not_to have_content(another_group.name)
expect(page.all('.js-groups-list-holder .content-list li').length).to eq 2
end
end
@@ -66,28 +88,29 @@ feature 'Dashboard Groups page', :js do
end
it 'shows subgroups inside of its parent group' do
- expect(page).to have_selector('.groups-list-tree-container .group-list-tree', count: 2)
- expect(page).to have_selector(".groups-list-tree-container #group-#{group.id} #group-#{subgroup.id}", count: 1)
+ expect(page).to have_selector("#group-#{group.id}")
+ click_group_caret(group)
+ expect(page).to have_selector("#group-#{group.id} #group-#{subgroup.id}")
end
it 'can toggle parent group' do
- # Expanded by default
- expect(page).to have_selector("#group-#{group.id} .fa-caret-down", count: 1)
- expect(page).not_to have_selector("#group-#{group.id} .fa-caret-right")
+ # Collapsed by default
+ expect(page).not_to have_selector("#group-#{group.id} .fa-caret-down", count: 1)
+ expect(page).to have_selector("#group-#{group.id} .fa-caret-right")
- # Collapse
- find("#group-#{group.id} .folder-caret").click
+ # expand
+ click_group_caret(group)
- expect(page).not_to have_selector("#group-#{group.id} .fa-caret-down")
- expect(page).to have_selector("#group-#{group.id} .fa-caret-right", count: 1)
- expect(page).not_to have_selector("#group-#{group.id} #group-#{subgroup.id}")
+ expect(page).to have_selector("#group-#{group.id} .fa-caret-down")
+ expect(page).not_to have_selector("#group-#{group.id} .fa-caret-right", count: 1)
+ expect(page).to have_selector("#group-#{group.id} #group-#{subgroup.id}")
- # Expand
- find("#group-#{group.id} .folder-caret").click
+ # collapse
+ click_group_caret(group)
- expect(page).to have_selector("#group-#{group.id} .fa-caret-down", count: 1)
- expect(page).not_to have_selector("#group-#{group.id} .fa-caret-right")
- expect(page).to have_selector("#group-#{group.id} #group-#{subgroup.id}")
+ expect(page).not_to have_selector("#group-#{group.id} .fa-caret-down", count: 1)
+ expect(page).to have_selector("#group-#{group.id} .fa-caret-right")
+ expect(page).not_to have_selector("#group-#{group.id} #group-#{subgroup.id}")
end
end
diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb
index 36eb3d4923a..5b4c00b3c7e 100644
--- a/spec/features/dashboard/issues_spec.rb
+++ b/spec/features/dashboard/issues_spec.rb
@@ -87,8 +87,10 @@ RSpec.describe 'Dashboard Issues' do
project_path = "/#{project.path_with_namespace}"
project_json = { name: project.name_with_namespace, url: project_path }.to_json
- # similate selection, and prevent overlap by dropdown menu
+ # simulate selection, and prevent overlap by dropdown menu
+ first('.project-item-select', visible: false)
execute_script("$('.project-item-select').val('#{project_json}').trigger('change');")
+ find('#select2-drop-mask', visible: false)
execute_script("$('#select2-drop-mask').remove();")
find('.new-project-item-link').click
diff --git a/spec/features/explore/groups_list_spec.rb b/spec/features/explore/groups_list_spec.rb
index b5325301968..801a33979ff 100644
--- a/spec/features/explore/groups_list_spec.rb
+++ b/spec/features/explore/groups_list_spec.rb
@@ -13,6 +13,7 @@ describe 'Explore Groups page', :js do
sign_in(user)
visit explore_groups_path
+ wait_for_requests
end
it 'shows groups user is member of' do
@@ -22,7 +23,7 @@ describe 'Explore Groups page', :js do
end
it 'filters groups' do
- fill_in 'filter_groups', with: group.name
+ fill_in 'filter', with: group.name
wait_for_requests
expect(page).to have_content(group.full_name)
@@ -31,10 +32,10 @@ describe 'Explore Groups page', :js do
end
it 'resets search when user cleans the input' do
- fill_in 'filter_groups', with: group.name
+ fill_in 'filter', with: group.name
wait_for_requests
- fill_in 'filter_groups', with: ""
+ fill_in 'filter', with: ""
wait_for_requests
expect(page).to have_content(group.full_name)
@@ -45,21 +46,21 @@ describe 'Explore Groups page', :js do
it 'shows non-archived projects count' do
# Initially project is not archived
- expect(find('.js-groups-list-holder .content-list li:first-child .stats span:first-child')).to have_text("1")
+ expect(find('.js-groups-list-holder .content-list li:first-child .stats .number-projects')).to have_text("1")
# Archive project
empty_project.archive!
visit explore_groups_path
# Check project count
- expect(find('.js-groups-list-holder .content-list li:first-child .stats span:first-child')).to have_text("0")
+ expect(find('.js-groups-list-holder .content-list li:first-child .stats .number-projects')).to have_text("0")
# Unarchive project
empty_project.unarchive!
visit explore_groups_path
# Check project count
- expect(find('.js-groups-list-holder .content-list li:first-child .stats span:first-child')).to have_text("1")
+ expect(find('.js-groups-list-holder .content-list li:first-child .stats .number-projects')).to have_text("1")
end
describe 'landing component' do
diff --git a/spec/features/groups/show_spec.rb b/spec/features/groups/show_spec.rb
index 303013e59d5..7fc2b383749 100644
--- a/spec/features/groups/show_spec.rb
+++ b/spec/features/groups/show_spec.rb
@@ -24,4 +24,35 @@ feature 'Group show page' do
it_behaves_like "an autodiscoverable RSS feed without an RSS token"
end
+
+ context 'subgroup support' do
+ let(:user) { create(:user) }
+
+ before do
+ group.add_owner(user)
+ sign_in(user)
+ end
+
+ context 'when subgroups are supported', :js, :nested_groups do
+ before do
+ allow(Group).to receive(:supports_nested_groups?) { true }
+ visit path
+ end
+
+ it 'allows creating subgroups' do
+ expect(page).to have_css("li[data-text='New subgroup']", visible: false)
+ end
+ end
+
+ context 'when subgroups are not supported' do
+ before do
+ allow(Group).to receive(:supports_nested_groups?) { false }
+ visit path
+ end
+
+ it 'allows creating subgroups' do
+ expect(page).not_to have_selector("li[data-text='New subgroup']", visible: false)
+ end
+ end
+ end
end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 563e8a65b6e..c1f3d94bc20 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -90,8 +90,7 @@ feature 'Group' do
context 'as admin' do
before do
- visit subgroups_group_path(group)
- click_link 'New Subgroup'
+ visit new_group_path(group, parent_id: group.id)
end
it 'creates a nested group' do
@@ -111,8 +110,8 @@ feature 'Group' do
sign_out(:user)
sign_in(user)
- visit subgroups_group_path(group)
- click_link 'New Subgroup'
+ visit new_group_path(group, parent_id: group.id)
+
fill_in 'Group path', with: 'bar'
click_button 'Create group'
@@ -120,16 +119,6 @@ feature 'Group' do
expect(page).to have_content("Group 'bar' was successfully created.")
end
end
-
- context 'when nested group feature is disabled' do
- it 'renders 404' do
- allow(Group).to receive(:supports_nested_groups?).and_return(false)
-
- visit subgroups_group_path(group)
-
- expect(page.status_code).to eq(404)
- end
- end
end
it 'checks permissions to avoid exposing groups by parent_id' do
@@ -210,13 +199,15 @@ feature 'Group' do
describe 'group page with nested groups', :nested_groups, :js do
let!(:group) { create(:group) }
let!(:nested_group) { create(:group, parent: group) }
+ let!(:project) { create(:project, namespace: group) }
let!(:path) { group_path(group) }
- it 'has nested groups tab with nested groups inside' do
+ it 'renders projects and groups on the page' do
visit path
- click_link 'Subgroups'
+ wait_for_requests
expect(page).to have_content(nested_group.name)
+ expect(page).to have_content(project.name)
end
end
diff --git a/spec/features/issues/issue_detail_spec.rb b/spec/features/issues/issue_detail_spec.rb
index c0c396af93f..6fbee0ebcb5 100644
--- a/spec/features/issues/issue_detail_spec.rb
+++ b/spec/features/issues/issue_detail_spec.rb
@@ -25,7 +25,7 @@ feature 'Issue Detail', :js do
wait_for_requests
click_link 'Edit'
- fill_in 'issue-title', with: 'issue title'
+ fill_in 'issuable-title', with: 'issue title'
click_button 'Save'
Users::DestroyService.new(user).execute(user)
diff --git a/spec/features/issues/markdown_toolbar_spec.rb b/spec/features/issues/markdown_toolbar_spec.rb
index 6869c2c869d..fee8fd9b365 100644
--- a/spec/features/issues/markdown_toolbar_spec.rb
+++ b/spec/features/issues/markdown_toolbar_spec.rb
@@ -16,6 +16,7 @@ feature 'Issue markdown toolbar', :js do
find('#note-body').native.send_key(:enter)
find('#note-body').native.send_keys('bold')
+ find('.js-main-target-form #note-body')
page.evaluate_script('document.querySelectorAll(".js-main-target-form #note-body")[0].setSelectionRange(4, 9)')
first('.toolbar-btn').click
@@ -28,6 +29,7 @@ feature 'Issue markdown toolbar', :js do
find('#note-body').native.send_key(:enter)
find('#note-body').native.send_keys('underline')
+ find('.js-main-target-form #note-body')
page.evaluate_script('document.querySelectorAll(".js-main-target-form #note-body")[0].setSelectionRange(4, 50)')
find('.toolbar-btn:nth-child(2)').click
diff --git a/spec/features/merge_requests/conflicts_spec.rb b/spec/features/merge_requests/conflicts_spec.rb
index de6a4ea6f65..4e2963c116d 100644
--- a/spec/features/merge_requests/conflicts_spec.rb
+++ b/spec/features/merge_requests/conflicts_spec.rb
@@ -60,12 +60,14 @@ feature 'Merge request conflict resolution', :js do
within find('.files-wrapper .diff-file', text: 'files/ruby/popen.rb') do
click_button 'Edit inline'
wait_for_requests
+ find('.files-wrapper .diff-file pre')
execute_script('ace.edit($(".files-wrapper .diff-file pre")[0]).setValue("One morning");')
end
within find('.files-wrapper .diff-file', text: 'files/ruby/regex.rb') do
click_button 'Edit inline'
wait_for_requests
+ find('.files-wrapper .diff-file pre')
execute_script('ace.edit($(".files-wrapper .diff-file pre")[1]).setValue("Gregor Samsa woke from troubled dreams");')
end
@@ -139,6 +141,7 @@ feature 'Merge request conflict resolution', :js do
it 'conflicts are resolved in Edit inline mode' do
within find('.files-wrapper .diff-file', text: 'files/markdown/ruby-style-guide.md') do
wait_for_requests
+ find('.files-wrapper .diff-file pre')
execute_script('ace.edit($(".files-wrapper .diff-file pre")[0]).setValue("Gregor Samsa woke from troubled dreams");')
end
diff --git a/spec/features/merge_requests/diff_notes_resolve_spec.rb b/spec/features/merge_requests/diff_notes_resolve_spec.rb
index 2b580ca1809..7264b3b6517 100644
--- a/spec/features/merge_requests/diff_notes_resolve_spec.rb
+++ b/spec/features/merge_requests/diff_notes_resolve_spec.rb
@@ -97,14 +97,33 @@ feature 'Diff notes resolve', :js do
visit_merge_request
end
- it 'hides when resolve discussion is clicked' do
- expect(page).to have_selector('.discussion-body', visible: false)
+ describe 'timeline view' do
+ it 'hides when resolve discussion is clicked' do
+ expect(page).to have_selector('.discussion-body', visible: false)
+ end
+
+ it 'shows resolved discussion when toggled' do
+ find(".timeline-content .discussion[data-discussion-id='#{note.discussion_id}'] .discussion-toggle-button").click
+
+ expect(page.find(".timeline-content #note_#{note.noteable_id}")).to be_visible
+ end
end
- it 'shows resolved discussion when toggled' do
- find(".timeline-content .discussion[data-discussion-id='#{note.discussion_id}'] .discussion-toggle-button").click
+ describe 'side-by-side view' do
+ before do
+ page.within('.merge-request-tabs') { click_link 'Changes' }
+ page.find('#parallel-diff-btn').click
+ end
- expect(page.find(".timeline-content #note_#{note.noteable_id}")).to be_visible
+ it 'hides when resolve discussion is clicked' do
+ expect(page).to have_selector('.diffs .diff-file .notes_holder', visible: false)
+ end
+
+ it 'shows resolved discussion when toggled' do
+ find('.diff-comment-avatar-holders').click
+
+ expect(find('.diffs .diff-file .notes_holder')).to be_visible
+ end
end
end
diff --git a/spec/features/merge_requests/edit_mr_spec.rb b/spec/features/merge_requests/edit_mr_spec.rb
index 4538555c168..4362f8b3fcc 100644
--- a/spec/features/merge_requests/edit_mr_spec.rb
+++ b/spec/features/merge_requests/edit_mr_spec.rb
@@ -66,6 +66,7 @@ feature 'Edit Merge Request' do
end
def get_textarea_height
+ find('#merge_request_description')
page.evaluate_script('document.getElementById("merge_request_description").offsetHeight')
end
end
diff --git a/spec/features/merge_requests/mini_pipeline_graph_spec.rb b/spec/features/merge_requests/mini_pipeline_graph_spec.rb
index 27b5e3cfec6..bac56270362 100644
--- a/spec/features/merge_requests/mini_pipeline_graph_spec.rb
+++ b/spec/features/merge_requests/mini_pipeline_graph_spec.rb
@@ -52,10 +52,12 @@ feature 'Mini Pipeline Graph', :js do
end
it 'should expand when hovered' do
+ find('.mini-pipeline-graph-dropdown-toggle')
before_width = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').outerWidth();")
toggle.hover
+ find('.mini-pipeline-graph-dropdown-toggle')
after_width = evaluate_script("$('.mini-pipeline-graph-dropdown-toggle:visible').outerWidth();")
expect(before_width).to be < after_width
diff --git a/spec/features/projects/blobs/edit_spec.rb b/spec/features/projects/blobs/edit_spec.rb
index 6c625ed17aa..965028a6f90 100644
--- a/spec/features/projects/blobs/edit_spec.rb
+++ b/spec/features/projects/blobs/edit_spec.rb
@@ -20,6 +20,7 @@ feature 'Editing file blob', :js do
def edit_and_commit
wait_for_requests
find('.js-edit-blob').click
+ find('#editor')
execute_script('ace.edit("editor").setValue("class NextFeature\nend\n")')
click_button 'Commit changes'
end
diff --git a/spec/features/projects/issuable_templates_spec.rb b/spec/features/projects/issuable_templates_spec.rb
index 9f67216705d..a012db8fd27 100644
--- a/spec/features/projects/issuable_templates_spec.rb
+++ b/spec/features/projects/issuable_templates_spec.rb
@@ -35,7 +35,7 @@ feature 'issuable templates', :js do
page.within('.content .issuable-actions') do
click_on 'Edit'
end
- fill_in :'issue-title', with: 'test issue title'
+ fill_in :'issuable-title', with: 'test issue title'
end
scenario 'user selects "bug" template' do
@@ -80,7 +80,7 @@ feature 'issuable templates', :js do
page.within('.content .issuable-actions') do
click_on 'Edit'
end
- fill_in :'issue-title', with: 'test issue title'
+ fill_in :'issuable-title', with: 'test issue title'
fill_in :'issue-description', with: prior_description
end
diff --git a/spec/features/projects/user_creates_files_spec.rb b/spec/features/projects/user_creates_files_spec.rb
index cbe70a93942..d84b91ddc32 100644
--- a/spec/features/projects/user_creates_files_spec.rb
+++ b/spec/features/projects/user_creates_files_spec.rb
@@ -60,6 +60,7 @@ describe 'User creates files' do
end
it 'creates and commit a new file', :js do
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:file_name, with: 'not_a_file.md')
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -75,6 +76,7 @@ describe 'User creates files' do
end
it 'creates and commit a new file with new lines at the end of file', :js do
+ find('#editor')
execute_script('ace.edit("editor").setValue("Sample\n\n\n")')
fill_in(:file_name, with: 'not_a_file.md')
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -86,6 +88,7 @@ describe 'User creates files' do
find('.js-edit-blob').click
+ find('#editor')
expect(evaluate_script('ace.edit("editor").getValue()')).to eq("Sample\n\n\n")
end
@@ -94,6 +97,7 @@ describe 'User creates files' do
expect(page).to have_selector('.file-editor')
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -108,6 +112,7 @@ describe 'User creates files' do
it 'creates and commit a new file specifying a new branch', :js do
expect(page).to have_selector('.file-editor')
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:file_name, with: 'not_a_file.md')
fill_in(:commit_message, with: 'New commit message', visible: true)
@@ -136,6 +141,7 @@ describe 'User creates files' do
expect(page).to have_selector('.file-editor')
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:file_name, with: 'not_a_file.md')
diff --git a/spec/features/projects/user_edits_files_spec.rb b/spec/features/projects/user_edits_files_spec.rb
index e8d83a661d4..d26ee653415 100644
--- a/spec/features/projects/user_edits_files_spec.rb
+++ b/spec/features/projects/user_edits_files_spec.rb
@@ -23,6 +23,7 @@ describe 'User edits files' do
find('.js-edit-blob').click
find('.file-editor', match: :first)
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
expect(evaluate_script('ace.edit("editor").getValue()')).to eq('*.rbca')
@@ -40,6 +41,7 @@ describe 'User edits files' do
find('.js-edit-blob').click
find('.file-editor', match: :first)
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -57,6 +59,7 @@ describe 'User edits files' do
find('.file-editor', match: :first)
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
fill_in(:branch_name, with: 'new_branch_name', visible: true)
@@ -74,6 +77,7 @@ describe 'User edits files' do
find('.js-edit-blob').click
find('.file-editor', match: :first)
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
click_link('Preview changes')
@@ -103,6 +107,7 @@ describe 'User edits files' do
find('.file-editor', match: :first)
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
expect(evaluate_script('ace.edit("editor").getValue()')).to eq('*.rbca')
@@ -119,6 +124,7 @@ describe 'User edits files' do
find('.file-editor', match: :first)
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'New commit message', visible: true)
click_button('Commit changes')
@@ -145,6 +151,7 @@ describe 'User edits files' do
expect(page).not_to have_link('Fork')
expect(page).not_to have_button('Cancel')
+ find('#editor')
execute_script("ace.edit('editor').setValue('*.rbca')")
fill_in(:commit_message, with: 'Another commit', visible: true)
click_button('Commit changes')
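Note: the `find('#editor')` lines added throughout the blob-editing specs above make Capybara wait until the Ace container exists before any `execute_script` call touches it. A minimal, self-contained sketch of that wait-then-script pattern follows; the visited path, route helper arguments, and file contents are placeholder assumptions, not taken from this diff:

  require 'spec_helper'

  feature 'Editing a file blob', :js do
    let(:user)    { create(:user) }
    let(:project) { create(:project, :repository, namespace: user.namespace) }

    before do
      sign_in(user)
    end

    scenario 'writes to the Ace editor only after it has rendered' do
      visit project_edit_blob_path(project, 'master/README.md') # placeholder route/arguments
      find('#editor') # Capybara retries until the element is present, so the script below cannot race the render
      execute_script("ace.edit('editor').setValue('example content')")
      expect(evaluate_script("ace.edit('editor').getValue()")).to eq('example content')
    end
  end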
diff --git a/spec/features/projects/wiki/markdown_preview_spec.rb b/spec/features/projects/wiki/markdown_preview_spec.rb
index 39bcda03b4b..337baaf4dcd 100644
--- a/spec/features/projects/wiki/markdown_preview_spec.rb
+++ b/spec/features/projects/wiki/markdown_preview_spec.rb
@@ -14,7 +14,6 @@ feature 'Projects > Wiki > User previews markdown changes', :js do
background do
project.team << [user, :master]
- WikiPages::CreateService.new(project, user, title: 'home', content: 'Home page').execute
sign_in(user)
diff --git a/spec/features/projects/wiki/shortcuts_spec.rb b/spec/features/projects/wiki/shortcuts_spec.rb
index eaff5f876b6..f70d1e710dd 100644
--- a/spec/features/projects/wiki/shortcuts_spec.rb
+++ b/spec/features/projects/wiki/shortcuts_spec.rb
@@ -3,9 +3,7 @@ require 'spec_helper'
feature 'Wiki shortcuts', :js do
let(:user) { create(:user) }
let(:project) { create(:project, namespace: user.namespace) }
- let(:wiki_page) do
- WikiPages::CreateService.new(project, user, title: 'home', content: 'Home page').execute
- end
+ let(:wiki_page) { create(:wiki_page, wiki: project.wiki, attrs: { title: 'home', content: 'Home page' }) }
before do
sign_in(user)
diff --git a/spec/features/projects/wiki/user_git_access_wiki_page_spec.rb b/spec/features/projects/wiki/user_git_access_wiki_page_spec.rb
index 9a92622ba2b..37a118c34ab 100644
--- a/spec/features/projects/wiki/user_git_access_wiki_page_spec.rb
+++ b/spec/features/projects/wiki/user_git_access_wiki_page_spec.rb
@@ -3,14 +3,7 @@ require 'spec_helper'
describe 'Projects > Wiki > User views Git access wiki page' do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
- let(:wiki_page) do
- WikiPages::CreateService.new(
- project,
- user,
- title: 'home',
- content: '[some link](other-page)'
- ).execute
- end
+ let(:wiki_page) { create(:wiki_page, wiki: project.wiki, attrs: { title: 'home', content: '[some link](other-page)' }) }
before do
sign_in(user)
diff --git a/spec/features/projects/wiki/user_views_wiki_in_project_page_spec.rb b/spec/features/projects/wiki/user_views_wiki_in_project_page_spec.rb
index cf9fe4c1ad1..ebb3bd044c1 100644
--- a/spec/features/projects/wiki/user_views_wiki_in_project_page_spec.rb
+++ b/spec/features/projects/wiki/user_views_wiki_in_project_page_spec.rb
@@ -18,12 +18,7 @@ describe 'Projects > Wiki > User views wiki in project page' do
context 'when wiki homepage contains a link' do
before do
- WikiPages::CreateService.new(
- project,
- user,
- title: 'home',
- content: '[some link](other-page)'
- ).execute
+ create(:wiki_page, wiki: project.wiki, attrs: { title: 'home', content: '[some link](other-page)' })
end
it 'displays the correct URL for the link' do
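Note: the wiki specs above replace direct `WikiPages::CreateService.new(...).execute` calls with the `:wiki_page` factory, reducing each page setup to a single line. A minimal sketch of the let-block form as it appears in this diff (values shown are the ones used above):

  let(:project)   { create(:project, :public) }
  let(:wiki_page) { create(:wiki_page, wiki: project.wiki, attrs: { title: 'home', content: 'Home page' }) }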
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index 3bc7ec3123f..3b01ed442bf 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -139,7 +139,7 @@ feature 'Project' do
it 'removes a project' do
expect { remove_with_confirm('Remove project', project.path) }.to change {Project.count}.by(-1)
- expect(page).to have_content "Project 'test / project1' will be deleted."
+ expect(page).to have_content "Project 'test / project1' is in the process of being deleted."
expect(Project.all.count).to be_zero
expect(project.issues).to be_empty
expect(project.merge_requests).to be_empty
diff --git a/spec/finders/group_descendants_finder_spec.rb b/spec/finders/group_descendants_finder_spec.rb
new file mode 100644
index 00000000000..074914420a1
--- /dev/null
+++ b/spec/finders/group_descendants_finder_spec.rb
@@ -0,0 +1,166 @@
+require 'spec_helper'
+
+describe GroupDescendantsFinder do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:params) { {} }
+ subject(:finder) do
+ described_class.new(current_user: user, parent_group: group, params: params)
+ end
+
+ before do
+ group.add_owner(user)
+ end
+
+ describe '#has_children?' do
+ it 'is true when there are projects' do
+ create(:project, namespace: group)
+
+ expect(finder.has_children?).to be_truthy
+ end
+
+ context 'when there are subgroups', :nested_groups do
+ it 'is true when there are projects' do
+ create(:group, parent: group)
+
+ expect(finder.has_children?).to be_truthy
+ end
+ end
+ end
+
+ describe '#execute' do
+ it 'includes projects' do
+ project = create(:project, namespace: group)
+
+ expect(finder.execute).to contain_exactly(project)
+ end
+
+ context 'when archived is `true`' do
+ let(:params) { { archived: 'true' } }
+
+ it 'includes archived projects' do
+ archived_project = create(:project, namespace: group, archived: true)
+ project = create(:project, namespace: group)
+
+ expect(finder.execute).to contain_exactly(archived_project, project)
+ end
+ end
+
+ context 'when archived is `only`' do
+ let(:params) { { archived: 'only' } }
+
+ it 'includes only archived projects' do
+ archived_project = create(:project, namespace: group, archived: true)
+ _project = create(:project, namespace: group)
+
+ expect(finder.execute).to contain_exactly(archived_project)
+ end
+ end
+
+ it 'does not include archived projects' do
+ _archived_project = create(:project, :archived, namespace: group)
+
+ expect(finder.execute).to be_empty
+ end
+
+ context 'with a filter' do
+ let(:params) { { filter: 'test' } }
+
+ it 'includes only projects matching the filter' do
+ _other_project = create(:project, namespace: group)
+ matching_project = create(:project, namespace: group, name: 'testproject')
+
+ expect(finder.execute).to contain_exactly(matching_project)
+ end
+ end
+ end
+
+ context 'with nested groups', :nested_groups do
+ let!(:project) { create(:project, namespace: group) }
+ let!(:subgroup) { create(:group, :private, parent: group) }
+
+ describe '#execute' do
+ it 'contains projects and subgroups' do
+ expect(finder.execute).to contain_exactly(subgroup, project)
+ end
+
+ it 'does not include subgroups the user does not have access to' do
+ subgroup.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+
+ public_subgroup = create(:group, :public, parent: group, path: 'public-group')
+ other_subgroup = create(:group, :private, parent: group, path: 'visible-private-group')
+ other_user = create(:user)
+ other_subgroup.add_developer(other_user)
+
+ finder = described_class.new(current_user: other_user, parent_group: group)
+
+ expect(finder.execute).to contain_exactly(public_subgroup, other_subgroup)
+ end
+
+ it 'only includes public groups when no user is given' do
+ public_subgroup = create(:group, :public, parent: group)
+ _private_subgroup = create(:group, :private, parent: group)
+
+ finder = described_class.new(current_user: nil, parent_group: group)
+
+ expect(finder.execute).to contain_exactly(public_subgroup)
+ end
+
+ context 'when archived is `true`' do
+ let(:params) { { archived: 'true' } }
+
+ it 'includes archived projects in the project count of subgroups' do
+ create(:project, namespace: subgroup, archived: true)
+
+ expect(finder.execute.first.preloaded_project_count).to eq(1)
+ end
+ end
+
+ context 'with a filter' do
+ let(:params) { { filter: 'test' } }
+
+ it 'contains only matching projects and subgroups' do
+ matching_project = create(:project, namespace: group, name: 'Testproject')
+ matching_subgroup = create(:group, name: 'testgroup', parent: group)
+
+ expect(finder.execute).to contain_exactly(matching_subgroup, matching_project)
+ end
+
+ it 'does not include subgroups the user does not have access to' do
+ _invisible_subgroup = create(:group, :private, parent: group, name: 'test1')
+ other_subgroup = create(:group, :private, parent: group, name: 'test2')
+ public_subgroup = create(:group, :public, parent: group, name: 'test3')
+ other_subsubgroup = create(:group, :private, parent: other_subgroup, name: 'test4')
+ other_user = create(:user)
+ other_subgroup.add_developer(other_user)
+
+ finder = described_class.new(current_user: other_user,
+ parent_group: group,
+ params: params)
+
+ expect(finder.execute).to contain_exactly(other_subgroup, public_subgroup, other_subsubgroup)
+ end
+
+ context 'with matching children' do
+ it 'includes a group that has a subgroup matching the query and its parent' do
+ matching_subgroup = create(:group, :private, name: 'testgroup', parent: subgroup)
+
+ expect(finder.execute).to contain_exactly(subgroup, matching_subgroup)
+ end
+
+ it 'includes the parent of a matching project' do
+ matching_project = create(:project, namespace: subgroup, name: 'Testproject')
+
+ expect(finder.execute).to contain_exactly(subgroup, matching_project)
+ end
+
+ it 'does not include the parent itself' do
+ group.update!(name: 'test')
+
+ expect(finder.execute).not_to include(group)
+ end
+ end
+ end
+ end
+ end
+end
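Note: the new spec above drives `GroupDescendantsFinder` only through its keyword constructor, `#has_children?`, and `#execute`; a usage sketch inferred from those calls (not an authoritative description of the finder's full API):

  finder = GroupDescendantsFinder.new(current_user: user,
                                      parent_group: group,
                                      params: { filter: 'test', archived: 'true' })

  finder.has_children? # true when the group has projects or subgroups visible to the user
  finder.execute       # projects and subgroups under `group`, filtered and permission-checked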
diff --git a/spec/fixtures/api/schemas/registry/tag.json b/spec/fixtures/api/schemas/registry/tag.json
index 5bc307e0e64..3a2c88791e1 100644
--- a/spec/fixtures/api/schemas/registry/tag.json
+++ b/spec/fixtures/api/schemas/registry/tag.json
@@ -14,6 +14,11 @@
"revision": {
"type": "string"
},
+ "short_revision": {
+ "type": "string",
+ "minLength": 9,
+ "maxLength": 9
+ },
"total_size": {
"type": "integer"
},
diff --git a/spec/fixtures/pages.tar.gz b/spec/fixtures/pages.tar.gz
index d0e89378b3e..5c4ea9690e8 100644
--- a/spec/fixtures/pages.tar.gz
+++ b/spec/fixtures/pages.tar.gz
Binary files differ
diff --git a/spec/fixtures/pages.zip b/spec/fixtures/pages.zip
index 9558fcd4b94..9bb75f953f8 100644
--- a/spec/fixtures/pages.zip
+++ b/spec/fixtures/pages.zip
Binary files differ
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index 87ae1fa5660..7a241b02d28 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -309,4 +309,12 @@ describe ApplicationHelper do
end
end
end
+
+ describe '#locale_path' do
+ it 'returns the locale path with an `_`' do
+ Gitlab::I18n.with_locale('pt-BR') do
+ expect(helper.locale_path).to include('assets/locale/pt_BR/app')
+ end
+ end
+ end
end
diff --git a/spec/initializers/settings_spec.rb b/spec/initializers/settings_spec.rb
index 9a974e70e8c..a11824d0ac5 100644
--- a/spec/initializers/settings_spec.rb
+++ b/spec/initializers/settings_spec.rb
@@ -18,26 +18,6 @@ describe Settings do
end
end
- describe '#repositories' do
- it 'assigns the default failure attributes' do
- repository_settings = Gitlab.config.repositories.storages['broken']
-
- expect(repository_settings['failure_count_threshold']).to eq(10)
- expect(repository_settings['failure_wait_time']).to eq(30)
- expect(repository_settings['failure_reset_time']).to eq(1800)
- expect(repository_settings['storage_timeout']).to eq(5)
- end
-
- it 'can be accessed with dot syntax all the way down' do
- expect(Gitlab.config.repositories.storages.broken.failure_count_threshold).to eq(10)
- end
-
- it 'can be accessed in a very specific way that breaks without reassigning each element with Settingslogic' do
- storage_settings = Gitlab.config.repositories.storages['broken']
- expect(storage_settings.failure_count_threshold).to eq(10)
- end
- end
-
describe '#host_without_www' do
context 'URL with protocol' do
it 'returns the host' do
diff --git a/spec/javascripts/blob/blob_file_dropzone_spec.js b/spec/javascripts/blob/blob_file_dropzone_spec.js
index 2c8183ff77b..47de63e6690 100644
--- a/spec/javascripts/blob/blob_file_dropzone_spec.js
+++ b/spec/javascripts/blob/blob_file_dropzone_spec.js
@@ -1,4 +1,3 @@
-import 'dropzone';
import BlobFileDropzone from '~/blob/blob_file_dropzone';
describe('BlobFileDropzone', () => {
diff --git a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
index 67166802c70..2ecb64d84b5 100644
--- a/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_visual_tokens_spec.js
@@ -791,6 +791,29 @@ describe('Filtered Search Visual Tokens', () => {
expect(tokenValueElement.innerText.trim()).toBe(dummyUser.name);
const avatar = tokenValueElement.querySelector('img.avatar');
expect(avatar.src).toBe(dummyUser.avatar_url);
+ expect(avatar.alt).toBe('');
+ })
+ .then(done)
+ .catch(done.fail);
+ });
+
+ it('escapes user name when creating token', (done) => {
+ const dummyUser = {
+ name: '<script>',
+ avatar_url: `${gl.TEST_HOST}/mypics/avatar.png`,
+ };
+ const { tokenValueContainer, tokenValueElement } = findElements(authorToken);
+ const tokenValue = tokenValueElement.innerText;
+ usersCacheSpy = (username) => {
+ expect(`@${username}`).toBe(tokenValue);
+ return Promise.resolve(dummyUser);
+ };
+
+ subject.updateUserTokenAppearance(tokenValueContainer, tokenValueElement, tokenValue)
+ .then(() => {
+ expect(tokenValueElement.innerText.trim()).toBe(dummyUser.name);
+ tokenValueElement.querySelector('.avatar').remove();
+ expect(tokenValueElement.innerHTML.trim()).toBe(_.escape(dummyUser.name));
})
.then(done)
.catch(done.fail);
diff --git a/spec/javascripts/flash_spec.js b/spec/javascripts/flash_spec.js
index 060ffaa339b..b669aabcee4 100644
--- a/spec/javascripts/flash_spec.js
+++ b/spec/javascripts/flash_spec.js
@@ -2,6 +2,7 @@ import flash, {
createFlashEl,
createAction,
hideFlash,
+ removeFlashClickListener,
} from '~/flash';
describe('Flash', () => {
@@ -266,4 +267,24 @@ describe('Flash', () => {
});
});
});
+
+ describe('removeFlashClickListener', () => {
+ beforeEach(() => {
+ document.body.innerHTML += '<div class="flash-container"><div class="flash"></div></div>';
+ });
+
+ it('removes global flash on click', (done) => {
+ const flashEl = document.querySelector('.flash');
+
+ removeFlashClickListener(flashEl, false);
+
+ flashEl.parentNode.click();
+
+ setTimeout(() => {
+ expect(document.querySelector('.flash')).toBeNull();
+
+ done();
+ });
+ });
+ });
});
diff --git a/spec/javascripts/groups/components/app_spec.js b/spec/javascripts/groups/components/app_spec.js
new file mode 100644
index 00000000000..cd19a0fae1e
--- /dev/null
+++ b/spec/javascripts/groups/components/app_spec.js
@@ -0,0 +1,443 @@
+import Vue from 'vue';
+
+import appComponent from '~/groups/components/app.vue';
+import groupFolderComponent from '~/groups/components/group_folder.vue';
+import groupItemComponent from '~/groups/components/group_item.vue';
+
+import eventHub from '~/groups/event_hub';
+import GroupsStore from '~/groups/store/groups_store';
+import GroupsService from '~/groups/service/groups_service';
+
+import {
+ mockEndpoint, mockGroups, mockSearchedGroups,
+ mockRawPageInfo, mockParentGroupItem, mockRawChildren,
+ mockChildren, mockPageInfo,
+} from '../mock_data';
+
+const createComponent = (hideProjects = false) => {
+ const Component = Vue.extend(appComponent);
+ const store = new GroupsStore(false);
+ const service = new GroupsService(mockEndpoint);
+
+ return new Component({
+ propsData: {
+ store,
+ service,
+ hideProjects,
+ },
+ });
+};
+
+const returnServicePromise = (data, failed) => new Promise((resolve, reject) => {
+ if (failed) {
+ reject(data);
+ } else {
+ resolve({
+ json() {
+ return data;
+ },
+ });
+ }
+});
+
+describe('AppComponent', () => {
+ let vm;
+
+ beforeEach((done) => {
+ Vue.component('group-folder', groupFolderComponent);
+ Vue.component('group-item', groupItemComponent);
+
+ vm = createComponent();
+
+ Vue.nextTick(() => {
+ done();
+ });
+ });
+
+ describe('computed', () => {
+ beforeEach(() => {
+ vm.$mount();
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('groups', () => {
+ it('should return list of groups from store', () => {
+ spyOn(vm.store, 'getGroups');
+
+ const groups = vm.groups;
+ expect(vm.store.getGroups).toHaveBeenCalled();
+ expect(groups).not.toBeDefined();
+ });
+ });
+
+ describe('pageInfo', () => {
+ it('should return pagination info from store', () => {
+ spyOn(vm.store, 'getPaginationInfo');
+
+ const pageInfo = vm.pageInfo;
+ expect(vm.store.getPaginationInfo).toHaveBeenCalled();
+ expect(pageInfo).not.toBeDefined();
+ });
+ });
+ });
+
+ describe('methods', () => {
+ beforeEach(() => {
+ vm.$mount();
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('fetchGroups', () => {
+ it('should call `getGroups` with all the params provided', (done) => {
+ spyOn(vm.service, 'getGroups').and.returnValue(returnServicePromise(mockGroups));
+
+ vm.fetchGroups({
+ parentId: 1,
+ page: 2,
+ filterGroupsBy: 'git',
+ sortBy: 'created_desc',
+ archived: true,
+ });
+ setTimeout(() => {
+ expect(vm.service.getGroups).toHaveBeenCalledWith(1, 2, 'git', 'created_desc', true);
+ done();
+ }, 0);
+ });
+
+ it('should set headers to store for building pagination info when called with `updatePagination`', (done) => {
+ spyOn(vm.service, 'getGroups').and.returnValue(returnServicePromise({ headers: mockRawPageInfo }));
+ spyOn(vm, 'updatePagination');
+
+ vm.fetchGroups({ updatePagination: true });
+ setTimeout(() => {
+ expect(vm.service.getGroups).toHaveBeenCalled();
+ expect(vm.updatePagination).toHaveBeenCalled();
+ done();
+ }, 0);
+ });
+
+ it('should show flash error when request fails', (done) => {
+ spyOn(vm.service, 'getGroups').and.returnValue(returnServicePromise(null, true));
+ spyOn($, 'scrollTo');
+ spyOn(window, 'Flash');
+
+ vm.fetchGroups({});
+ setTimeout(() => {
+ expect(vm.isLoading).toBeFalsy();
+ expect($.scrollTo).toHaveBeenCalledWith(0);
+ expect(window.Flash).toHaveBeenCalledWith('An error occurred. Please try again.');
+ done();
+ }, 0);
+ });
+ });
+
+ describe('fetchAllGroups', () => {
+ it('should fetch default set of groups', (done) => {
+ spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise(mockGroups));
+ spyOn(vm, 'updatePagination').and.callThrough();
+ spyOn(vm, 'updateGroups').and.callThrough();
+
+ vm.fetchAllGroups();
+ expect(vm.isLoading).toBeTruthy();
+ expect(vm.fetchGroups).toHaveBeenCalled();
+ setTimeout(() => {
+ expect(vm.isLoading).toBeFalsy();
+ expect(vm.updateGroups).toHaveBeenCalled();
+ done();
+ }, 0);
+ });
+
+ it('should fetch matching set of groups when app is loaded with search query', (done) => {
+ spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise(mockSearchedGroups));
+ spyOn(vm, 'updateGroups').and.callThrough();
+
+ vm.fetchAllGroups();
+ expect(vm.fetchGroups).toHaveBeenCalledWith({
+ page: null,
+ filterGroupsBy: null,
+ sortBy: null,
+ updatePagination: true,
+ archived: null,
+ });
+ setTimeout(() => {
+ expect(vm.updateGroups).toHaveBeenCalled();
+ done();
+ }, 0);
+ });
+ });
+
+ describe('fetchPage', () => {
+ it('should fetch groups for provided page details and update window state', (done) => {
+ spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise(mockGroups));
+ spyOn(vm, 'updateGroups').and.callThrough();
+ spyOn(gl.utils, 'mergeUrlParams').and.callThrough();
+ spyOn(window.history, 'replaceState');
+ spyOn($, 'scrollTo');
+
+ vm.fetchPage(2, null, null, true);
+ expect(vm.isLoading).toBeTruthy();
+ expect(vm.fetchGroups).toHaveBeenCalledWith({
+ page: 2,
+ filterGroupsBy: null,
+ sortBy: null,
+ updatePagination: true,
+ archived: true,
+ });
+ setTimeout(() => {
+ expect(vm.isLoading).toBeFalsy();
+ expect($.scrollTo).toHaveBeenCalledWith(0);
+ expect(gl.utils.mergeUrlParams).toHaveBeenCalledWith({ page: 2 }, jasmine.any(String));
+ expect(window.history.replaceState).toHaveBeenCalledWith({
+ page: jasmine.any(String),
+ }, jasmine.any(String), jasmine.any(String));
+ expect(vm.updateGroups).toHaveBeenCalled();
+ done();
+ }, 0);
+ });
+ });
+
+ describe('toggleChildren', () => {
+ let groupItem;
+
+ beforeEach(() => {
+ groupItem = Object.assign({}, mockParentGroupItem);
+ groupItem.isOpen = false;
+ groupItem.isChildrenLoading = false;
+ });
+
+ it('should fetch children of given group and expand it if group is collapsed and children are not loaded', (done) => {
+ spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise(mockRawChildren));
+ spyOn(vm.store, 'setGroupChildren');
+
+ vm.toggleChildren(groupItem);
+ expect(groupItem.isChildrenLoading).toBeTruthy();
+ expect(vm.fetchGroups).toHaveBeenCalledWith({
+ parentId: groupItem.id,
+ });
+ setTimeout(() => {
+ expect(vm.store.setGroupChildren).toHaveBeenCalled();
+ done();
+ }, 0);
+ });
+
+ it('should skip network request while expanding group if children are already loaded', () => {
+ spyOn(vm, 'fetchGroups');
+ groupItem.children = mockRawChildren;
+
+ vm.toggleChildren(groupItem);
+ expect(vm.fetchGroups).not.toHaveBeenCalled();
+ expect(groupItem.isOpen).toBeTruthy();
+ });
+
+ it('should collapse group if it is already expanded', () => {
+ spyOn(vm, 'fetchGroups');
+ groupItem.isOpen = true;
+
+ vm.toggleChildren(groupItem);
+ expect(vm.fetchGroups).not.toHaveBeenCalled();
+ expect(groupItem.isOpen).toBeFalsy();
+ });
+
+ it('should set `isChildrenLoading` back to `false` if load request fails', (done) => {
+ spyOn(vm, 'fetchGroups').and.returnValue(returnServicePromise({}, true));
+
+ vm.toggleChildren(groupItem);
+ expect(groupItem.isChildrenLoading).toBeTruthy();
+ setTimeout(() => {
+ expect(groupItem.isChildrenLoading).toBeFalsy();
+ done();
+ }, 0);
+ });
+ });
+
+ describe('leaveGroup', () => {
+ let groupItem;
+ let childGroupItem;
+
+ beforeEach(() => {
+ groupItem = Object.assign({}, mockParentGroupItem);
+ groupItem.children = mockChildren;
+ childGroupItem = groupItem.children[0];
+ groupItem.isChildrenLoading = false;
+ });
+
+ it('should leave group and remove group item from tree', (done) => {
+ const notice = `You left the "${childGroupItem.fullName}" group.`;
+ spyOn(vm.service, 'leaveGroup').and.returnValue(returnServicePromise({ notice }));
+ spyOn(vm.store, 'removeGroup').and.callThrough();
+ spyOn(window, 'Flash');
+ spyOn($, 'scrollTo');
+
+ vm.leaveGroup(childGroupItem, groupItem);
+ expect(childGroupItem.isBeingRemoved).toBeTruthy();
+ expect(vm.service.leaveGroup).toHaveBeenCalledWith(childGroupItem.leavePath);
+ setTimeout(() => {
+ expect($.scrollTo).toHaveBeenCalledWith(0);
+ expect(vm.store.removeGroup).toHaveBeenCalledWith(childGroupItem, groupItem);
+ expect(window.Flash).toHaveBeenCalledWith(notice, 'notice');
+ done();
+ }, 0);
+ });
+
+ it('should show error flash message if request failed to leave group', (done) => {
+ const message = 'An error occurred. Please try again.';
+ spyOn(vm.service, 'leaveGroup').and.returnValue(returnServicePromise({ status: 500 }, true));
+ spyOn(vm.store, 'removeGroup').and.callThrough();
+ spyOn(window, 'Flash');
+
+ vm.leaveGroup(childGroupItem, groupItem);
+ expect(childGroupItem.isBeingRemoved).toBeTruthy();
+ expect(vm.service.leaveGroup).toHaveBeenCalledWith(childGroupItem.leavePath);
+ setTimeout(() => {
+ expect(vm.store.removeGroup).not.toHaveBeenCalled();
+ expect(window.Flash).toHaveBeenCalledWith(message);
+ expect(childGroupItem.isBeingRemoved).toBeFalsy();
+ done();
+ }, 0);
+ });
+
+ it('should show appropriate error flash message if request to leave group is forbidden', (done) => {
+ const message = 'Failed to leave the group. Please make sure you are not the only owner.';
+ spyOn(vm.service, 'leaveGroup').and.returnValue(returnServicePromise({ status: 403 }, true));
+ spyOn(vm.store, 'removeGroup').and.callThrough();
+ spyOn(window, 'Flash');
+
+ vm.leaveGroup(childGroupItem, groupItem);
+ expect(childGroupItem.isBeingRemoved).toBeTruthy();
+ expect(vm.service.leaveGroup).toHaveBeenCalledWith(childGroupItem.leavePath);
+ setTimeout(() => {
+ expect(vm.store.removeGroup).not.toHaveBeenCalled();
+ expect(window.Flash).toHaveBeenCalledWith(message);
+ expect(childGroupItem.isBeingRemoved).toBeFalsy();
+ done();
+ }, 0);
+ });
+ });
+
+ describe('updatePagination', () => {
+ it('should set pagination info to store from provided headers', () => {
+ spyOn(vm.store, 'setPaginationInfo');
+
+ vm.updatePagination(mockRawPageInfo);
+ expect(vm.store.setPaginationInfo).toHaveBeenCalledWith(mockRawPageInfo);
+ });
+ });
+
+ describe('updateGroups', () => {
+ it('should call setGroups on store if method was called directly', () => {
+ spyOn(vm.store, 'setGroups');
+
+ vm.updateGroups(mockGroups);
+ expect(vm.store.setGroups).toHaveBeenCalledWith(mockGroups);
+ });
+
+ it('should call setSearchedGroups on store if method was called with fromSearch param', () => {
+ spyOn(vm.store, 'setSearchedGroups');
+
+ vm.updateGroups(mockGroups, true);
+ expect(vm.store.setSearchedGroups).toHaveBeenCalledWith(mockGroups);
+ });
+
+ it('should set `isSearchEmpty` prop based on groups count', () => {
+ vm.updateGroups(mockGroups);
+ expect(vm.isSearchEmpty).toBeFalsy();
+
+ vm.updateGroups([]);
+ expect(vm.isSearchEmpty).toBeTruthy();
+ });
+ });
+ });
+
+ describe('created', () => {
+ it('should bind event listeners on eventHub', (done) => {
+ spyOn(eventHub, '$on');
+
+ const newVm = createComponent();
+ newVm.$mount();
+
+ Vue.nextTick(() => {
+ expect(eventHub.$on).toHaveBeenCalledWith('fetchPage', jasmine.any(Function));
+ expect(eventHub.$on).toHaveBeenCalledWith('toggleChildren', jasmine.any(Function));
+ expect(eventHub.$on).toHaveBeenCalledWith('leaveGroup', jasmine.any(Function));
+ expect(eventHub.$on).toHaveBeenCalledWith('updatePagination', jasmine.any(Function));
+ expect(eventHub.$on).toHaveBeenCalledWith('updateGroups', jasmine.any(Function));
+ newVm.$destroy();
+ done();
+ });
+ });
+
+ it('should initialize `searchEmptyMessage` prop with correct string when `hideProjects` is `false`', (done) => {
+ const newVm = createComponent();
+ newVm.$mount();
+ Vue.nextTick(() => {
+ expect(newVm.searchEmptyMessage).toBe('Sorry, no groups or projects matched your search');
+ newVm.$destroy();
+ done();
+ });
+ });
+
+ it('should initialize `searchEmptyMessage` prop with correct string when `hideProjects` is `true`', (done) => {
+ const newVm = createComponent(true);
+ newVm.$mount();
+ Vue.nextTick(() => {
+ expect(newVm.searchEmptyMessage).toBe('Sorry, no groups matched your search');
+ newVm.$destroy();
+ done();
+ });
+ });
+ });
+
+ describe('beforeDestroy', () => {
+ it('should unbind event listeners on eventHub', (done) => {
+ spyOn(eventHub, '$off');
+
+ const newVm = createComponent();
+ newVm.$mount();
+ newVm.$destroy();
+
+ Vue.nextTick(() => {
+ expect(eventHub.$off).toHaveBeenCalledWith('fetchPage', jasmine.any(Function));
+ expect(eventHub.$off).toHaveBeenCalledWith('toggleChildren', jasmine.any(Function));
+ expect(eventHub.$off).toHaveBeenCalledWith('leaveGroup', jasmine.any(Function));
+ expect(eventHub.$off).toHaveBeenCalledWith('updatePagination', jasmine.any(Function));
+ expect(eventHub.$off).toHaveBeenCalledWith('updateGroups', jasmine.any(Function));
+ done();
+ });
+ });
+ });
+
+ describe('template', () => {
+ beforeEach(() => {
+ vm.$mount();
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should render loading icon', (done) => {
+ vm.isLoading = true;
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.loading-animation')).toBeDefined();
+ expect(vm.$el.querySelector('i.fa').getAttribute('aria-label')).toBe('Loading groups');
+ done();
+ });
+ });
+
+ it('should render groups tree', (done) => {
+ vm.groups = [mockParentGroupItem];
+ vm.isLoading = false;
+ vm.pageInfo = mockPageInfo;
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.groups-list-tree-container')).toBeDefined();
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/groups/components/group_folder_spec.js b/spec/javascripts/groups/components/group_folder_spec.js
new file mode 100644
index 00000000000..4eb198595fb
--- /dev/null
+++ b/spec/javascripts/groups/components/group_folder_spec.js
@@ -0,0 +1,66 @@
+import Vue from 'vue';
+
+import groupFolderComponent from '~/groups/components/group_folder.vue';
+import groupItemComponent from '~/groups/components/group_item.vue';
+import { mockGroups, mockParentGroupItem } from '../mock_data';
+
+const createComponent = (groups = mockGroups, parentGroup = mockParentGroupItem) => {
+ const Component = Vue.extend(groupFolderComponent);
+
+ return new Component({
+ propsData: {
+ groups,
+ parentGroup,
+ },
+ });
+};
+
+describe('GroupFolderComponent', () => {
+ let vm;
+
+ beforeEach((done) => {
+ Vue.component('group-item', groupItemComponent);
+
+ vm = createComponent();
+ vm.$mount();
+
+ Vue.nextTick(() => {
+ done();
+ });
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('computed', () => {
+ describe('hasMoreChildren', () => {
+ it('should return false when childrenCount of group is less than MAX_CHILDREN_COUNT', () => {
+ expect(vm.hasMoreChildren).toBeFalsy();
+ });
+ });
+
+ describe('moreChildrenStats', () => {
+ it('should return message with count of excess children over MAX_CHILDREN_COUNT limit', () => {
+ expect(vm.moreChildrenStats).toBe('3 more items');
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('should render component template correctly', () => {
+ expect(vm.$el.classList.contains('group-list-tree')).toBeTruthy();
+ expect(vm.$el.querySelectorAll('li.group-row').length).toBe(7);
+ });
+
+ it('should render more children link when groups list has children over MAX_CHILDREN_COUNT limit', () => {
+ const parentGroup = Object.assign({}, mockParentGroupItem);
+ parentGroup.childrenCount = 21;
+
+ const newVm = createComponent(mockGroups, parentGroup);
+ newVm.$mount();
+ expect(newVm.$el.querySelector('li.group-row a.has-more-items')).toBeDefined();
+ newVm.$destroy();
+ });
+ });
+});
diff --git a/spec/javascripts/groups/components/group_item_spec.js b/spec/javascripts/groups/components/group_item_spec.js
new file mode 100644
index 00000000000..0f4fbdae445
--- /dev/null
+++ b/spec/javascripts/groups/components/group_item_spec.js
@@ -0,0 +1,177 @@
+import Vue from 'vue';
+
+import groupItemComponent from '~/groups/components/group_item.vue';
+import groupFolderComponent from '~/groups/components/group_folder.vue';
+import eventHub from '~/groups/event_hub';
+import { mockParentGroupItem, mockChildren } from '../mock_data';
+
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+const createComponent = (group = mockParentGroupItem, parentGroup = mockChildren[0]) => {
+ const Component = Vue.extend(groupItemComponent);
+
+ return mountComponent(Component, {
+ group,
+ parentGroup,
+ });
+};
+
+describe('GroupItemComponent', () => {
+ let vm;
+
+ beforeEach((done) => {
+ Vue.component('group-folder', groupFolderComponent);
+
+ vm = createComponent();
+
+ Vue.nextTick(() => {
+ done();
+ });
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('computed', () => {
+ describe('groupDomId', () => {
+ it('should return ID string suffixed with group ID', () => {
+ expect(vm.groupDomId).toBe('group-55');
+ });
+ });
+
+ describe('rowClass', () => {
+ it('should return map of classes based on group details', () => {
+ const classes = ['is-open', 'has-children', 'has-description', 'being-removed'];
+ const rowClass = vm.rowClass;
+
+ expect(Object.keys(rowClass).length).toBe(classes.length);
+ Object.keys(rowClass).forEach((className) => {
+ expect(classes.indexOf(className) > -1).toBeTruthy();
+ });
+ });
+ });
+
+ describe('hasChildren', () => {
+ it('should return boolean value representing if group has any children present', () => {
+ let newVm;
+ const group = Object.assign({}, mockParentGroupItem);
+
+ group.childrenCount = 5;
+ newVm = createComponent(group);
+ expect(newVm.hasChildren).toBeTruthy();
+ newVm.$destroy();
+
+ group.childrenCount = 0;
+ newVm = createComponent(group);
+ expect(newVm.hasChildren).toBeFalsy();
+ newVm.$destroy();
+ });
+ });
+
+ describe('hasAvatar', () => {
+ it('should return boolean value representing if group has any avatar present', () => {
+ let newVm;
+ const group = Object.assign({}, mockParentGroupItem);
+
+ group.avatarUrl = null;
+ newVm = createComponent(group);
+ expect(newVm.hasAvatar).toBeFalsy();
+ newVm.$destroy();
+
+ group.avatarUrl = '/uploads/group_avatar.png';
+ newVm = createComponent(group);
+ expect(newVm.hasAvatar).toBeTruthy();
+ newVm.$destroy();
+ });
+ });
+
+ describe('isGroup', () => {
+ it('should return boolean value representing if group item is of type `group` or not', () => {
+ let newVm;
+ const group = Object.assign({}, mockParentGroupItem);
+
+ group.type = 'group';
+ newVm = createComponent(group);
+ expect(newVm.isGroup).toBeTruthy();
+ newVm.$destroy();
+
+ group.type = 'project';
+ newVm = createComponent(group);
+ expect(newVm.isGroup).toBeFalsy();
+ newVm.$destroy();
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('onClickRowGroup', () => {
+ let event;
+
+ beforeEach(() => {
+ const classList = {
+ contains() {
+ return false;
+ },
+ };
+
+ event = {
+ target: {
+ classList,
+ parentElement: {
+ classList,
+ },
+ },
+ };
+ });
+
+ it('should emit `toggleChildren` event when expand is clicked on a group and it has children present', () => {
+ spyOn(eventHub, '$emit');
+
+ vm.onClickRowGroup(event);
+ expect(eventHub.$emit).toHaveBeenCalledWith('toggleChildren', vm.group);
+ });
+
+ it('should navigate page to group homepage if group does not have any children present', (done) => {
+ const group = Object.assign({}, mockParentGroupItem);
+ group.childrenCount = 0;
+ const newVm = createComponent(group);
+ spyOn(gl.utils, 'visitUrl').and.stub();
+ spyOn(eventHub, '$emit');
+
+ newVm.onClickRowGroup(event);
+ setTimeout(() => {
+ expect(eventHub.$emit).not.toHaveBeenCalled();
+ expect(gl.utils.visitUrl).toHaveBeenCalledWith(newVm.group.relativePath);
+ done();
+ }, 0);
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('should render component template correctly', () => {
+ expect(vm.$el.getAttribute('id')).toBe('group-55');
+ expect(vm.$el.classList.contains('group-row')).toBeTruthy();
+
+ expect(vm.$el.querySelector('.group-row-contents')).toBeDefined();
+ expect(vm.$el.querySelector('.group-row-contents .controls')).toBeDefined();
+ expect(vm.$el.querySelector('.group-row-contents .stats')).toBeDefined();
+
+ expect(vm.$el.querySelector('.folder-toggle-wrap')).toBeDefined();
+ expect(vm.$el.querySelector('.folder-toggle-wrap .folder-caret')).toBeDefined();
+ expect(vm.$el.querySelector('.folder-toggle-wrap .item-type-icon')).toBeDefined();
+
+ expect(vm.$el.querySelector('.avatar-container')).toBeDefined();
+ expect(vm.$el.querySelector('.avatar-container a.no-expand')).toBeDefined();
+ expect(vm.$el.querySelector('.avatar-container .avatar')).toBeDefined();
+
+ expect(vm.$el.querySelector('.title')).toBeDefined();
+ expect(vm.$el.querySelector('.title a.no-expand')).toBeDefined();
+ expect(vm.$el.querySelector('.access-type')).toBeDefined();
+ expect(vm.$el.querySelector('.description')).toBeDefined();
+
+ expect(vm.$el.querySelector('.group-list-tree')).toBeDefined();
+ });
+ });
+});
diff --git a/spec/javascripts/groups/components/groups_spec.js b/spec/javascripts/groups/components/groups_spec.js
new file mode 100644
index 00000000000..90e818c1545
--- /dev/null
+++ b/spec/javascripts/groups/components/groups_spec.js
@@ -0,0 +1,70 @@
+import Vue from 'vue';
+
+import groupsComponent from '~/groups/components/groups.vue';
+import groupFolderComponent from '~/groups/components/group_folder.vue';
+import groupItemComponent from '~/groups/components/group_item.vue';
+import eventHub from '~/groups/event_hub';
+import { mockGroups, mockPageInfo } from '../mock_data';
+
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+const createComponent = (searchEmpty = false) => {
+ const Component = Vue.extend(groupsComponent);
+
+ return mountComponent(Component, {
+ groups: mockGroups,
+ pageInfo: mockPageInfo,
+ searchEmptyMessage: 'No matching results',
+ searchEmpty,
+ });
+};
+
+describe('GroupsComponent', () => {
+ let vm;
+
+ beforeEach((done) => {
+ Vue.component('group-folder', groupFolderComponent);
+ Vue.component('group-item', groupItemComponent);
+
+ vm = createComponent();
+
+ Vue.nextTick(() => {
+ done();
+ });
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('methods', () => {
+ describe('change', () => {
+ it('should emit `fetchPage` event when page is changed via pagination', () => {
+ spyOn(eventHub, '$emit').and.stub();
+
+ vm.change(2);
+ expect(eventHub.$emit).toHaveBeenCalledWith('fetchPage', 2, jasmine.any(Object), jasmine.any(Object), jasmine.any(Object));
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('should render component template correctly', (done) => {
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.groups-list-tree-container')).toBeDefined();
+ expect(vm.$el.querySelector('.group-list-tree')).toBeDefined();
+ expect(vm.$el.querySelector('.gl-pagination')).toBeDefined();
+ expect(vm.$el.querySelectorAll('.has-no-search-results').length === 0).toBeTruthy();
+ done();
+ });
+ });
+
+ it('should render empty search message when `searchEmpty` is `true`', (done) => {
+ vm.searchEmpty = true;
+ Vue.nextTick(() => {
+ expect(vm.$el.querySelector('.has-no-search-results')).toBeDefined();
+ done();
+ });
+ });
+ });
+});
diff --git a/spec/javascripts/groups/components/item_actions_spec.js b/spec/javascripts/groups/components/item_actions_spec.js
new file mode 100644
index 00000000000..2ce1a749a96
--- /dev/null
+++ b/spec/javascripts/groups/components/item_actions_spec.js
@@ -0,0 +1,110 @@
+import Vue from 'vue';
+
+import itemActionsComponent from '~/groups/components/item_actions.vue';
+import eventHub from '~/groups/event_hub';
+import { mockParentGroupItem, mockChildren } from '../mock_data';
+
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+const createComponent = (group = mockParentGroupItem, parentGroup = mockChildren[0]) => {
+ const Component = Vue.extend(itemActionsComponent);
+
+ return mountComponent(Component, {
+ group,
+ parentGroup,
+ });
+};
+
+describe('ItemActionsComponent', () => {
+ let vm;
+
+ beforeEach(() => {
+ vm = createComponent();
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ describe('computed', () => {
+ describe('leaveConfirmationMessage', () => {
+ it('should return appropriate string for leave group confirmation', () => {
+ expect(vm.leaveConfirmationMessage).toBe('Are you sure you want to leave the "platform / hardware" group?');
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('onLeaveGroup', () => {
+ it('should change `dialogStatus` prop to `true` which shows confirmation dialog', () => {
+ expect(vm.dialogStatus).toBeFalsy();
+ vm.onLeaveGroup();
+ expect(vm.dialogStatus).toBeTruthy();
+ });
+ });
+
+ describe('leaveGroup', () => {
+ it('should change `dialogStatus` prop to `false` and emit `leaveGroup` event with required params when called with `leaveConfirmed` as `true`', () => {
+ spyOn(eventHub, '$emit');
+ vm.dialogStatus = true;
+ vm.leaveGroup(true);
+ expect(vm.dialogStatus).toBeFalsy();
+ expect(eventHub.$emit).toHaveBeenCalledWith('leaveGroup', vm.group, vm.parentGroup);
+ });
+
+ it('should change `dialogStatus` prop to `false` and should NOT emit `leaveGroup` event when called with `leaveConfirmed` as `false`', () => {
+ spyOn(eventHub, '$emit');
+ vm.dialogStatus = true;
+ vm.leaveGroup(false);
+ expect(vm.dialogStatus).toBeFalsy();
+ expect(eventHub.$emit).not.toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('should render component template correctly', () => {
+ expect(vm.$el.classList.contains('controls')).toBeTruthy();
+ });
+
+ it('should render Edit Group button with correct attribute values', () => {
+ const group = Object.assign({}, mockParentGroupItem);
+ group.canEdit = true;
+ const newVm = createComponent(group);
+
+ const editBtn = newVm.$el.querySelector('a.edit-group');
+ expect(editBtn).toBeDefined();
+ expect(editBtn.classList.contains('no-expand')).toBeTruthy();
+ expect(editBtn.getAttribute('href')).toBe(group.editPath);
+ expect(editBtn.getAttribute('aria-label')).toBe('Edit group');
+ expect(editBtn.dataset.originalTitle).toBe('Edit group');
+ expect(editBtn.querySelector('i.fa.fa-cogs')).toBeDefined();
+
+ newVm.$destroy();
+ });
+
+ it('should render Leave Group button with correct attribute values', () => {
+ const group = Object.assign({}, mockParentGroupItem);
+ group.canLeave = true;
+ const newVm = createComponent(group);
+
+ const leaveBtn = newVm.$el.querySelector('a.leave-group');
+ expect(leaveBtn).toBeDefined();
+ expect(leaveBtn.classList.contains('no-expand')).toBeTruthy();
+ expect(leaveBtn.getAttribute('href')).toBe(group.leavePath);
+ expect(leaveBtn.getAttribute('aria-label')).toBe('Leave this group');
+ expect(leaveBtn.dataset.originalTitle).toBe('Leave this group');
+ expect(leaveBtn.querySelector('i.fa.fa-sign-out')).toBeDefined();
+
+ newVm.$destroy();
+ });
+
+ it('should show modal dialog when `dialogStatus` is set to `true`', () => {
+ vm.dialogStatus = true;
+ const modalDialogEl = vm.$el.querySelector('.modal.popup-dialog');
+ expect(modalDialogEl).toBeDefined();
+ expect(modalDialogEl.querySelector('.modal-title').innerText.trim()).toBe('Are you sure?');
+ expect(modalDialogEl.querySelector('.btn.btn-warning').innerText.trim()).toBe('Leave');
+ });
+ });
+});
diff --git a/spec/javascripts/groups/components/item_caret_spec.js b/spec/javascripts/groups/components/item_caret_spec.js
new file mode 100644
index 00000000000..4310a07e6e6
--- /dev/null
+++ b/spec/javascripts/groups/components/item_caret_spec.js
@@ -0,0 +1,40 @@
+import Vue from 'vue';
+
+import itemCaretComponent from '~/groups/components/item_caret.vue';
+
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+const createComponent = (isGroupOpen = false) => {
+ const Component = Vue.extend(itemCaretComponent);
+
+ return mountComponent(Component, {
+ isGroupOpen,
+ });
+};
+
+describe('ItemCaretComponent', () => {
+ describe('template', () => {
+ it('should render component template correctly', () => {
+ const vm = createComponent();
+ vm.$mount();
+ expect(vm.$el.classList.contains('folder-caret')).toBeTruthy();
+ vm.$destroy();
+ });
+
+ it('should render caret down icon if `isGroupOpen` prop is `true`', () => {
+ const vm = createComponent(true);
+ vm.$mount();
+ expect(vm.$el.querySelectorAll('i.fa.fa-caret-down').length).toBe(1);
+ expect(vm.$el.querySelectorAll('i.fa.fa-caret-right').length).toBe(0);
+ vm.$destroy();
+ });
+
+ it('should render caret right icon if `isGroupOpen` prop is `false`', () => {
+ const vm = createComponent();
+ vm.$mount();
+ expect(vm.$el.querySelectorAll('i.fa.fa-caret-down').length).toBe(0);
+ expect(vm.$el.querySelectorAll('i.fa.fa-caret-right').length).toBe(1);
+ vm.$destroy();
+ });
+ });
+});
diff --git a/spec/javascripts/groups/components/item_stats_spec.js b/spec/javascripts/groups/components/item_stats_spec.js
new file mode 100644
index 00000000000..e200f9f08bd
--- /dev/null
+++ b/spec/javascripts/groups/components/item_stats_spec.js
@@ -0,0 +1,159 @@
+import Vue from 'vue';
+
+import itemStatsComponent from '~/groups/components/item_stats.vue';
+import {
+ mockParentGroupItem,
+ ITEM_TYPE,
+ VISIBILITY_TYPE_ICON,
+ GROUP_VISIBILITY_TYPE,
+ PROJECT_VISIBILITY_TYPE,
+} from '../mock_data';
+
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+const createComponent = (item = mockParentGroupItem) => {
+ const Component = Vue.extend(itemStatsComponent);
+
+ return mountComponent(Component, {
+ item,
+ });
+};
+
+describe('ItemStatsComponent', () => {
+ describe('computed', () => {
+ describe('visibilityIcon', () => {
+ it('should return icon class based on `item.visibility` value', () => {
+ Object.keys(VISIBILITY_TYPE_ICON).forEach((visibility) => {
+ const item = Object.assign({}, mockParentGroupItem, { visibility });
+ const vm = createComponent(item);
+ vm.$mount();
+ expect(vm.visibilityIcon).toBe(VISIBILITY_TYPE_ICON[visibility]);
+ vm.$destroy();
+ });
+ });
+ });
+
+ describe('visibilityTooltip', () => {
+ it('should return tooltip string for Group based on `item.visibility` value', () => {
+ Object.keys(GROUP_VISIBILITY_TYPE).forEach((visibility) => {
+ const item = Object.assign({}, mockParentGroupItem, {
+ visibility,
+ type: ITEM_TYPE.GROUP,
+ });
+ const vm = createComponent(item);
+ vm.$mount();
+ expect(vm.visibilityTooltip).toBe(GROUP_VISIBILITY_TYPE[visibility]);
+ vm.$destroy();
+ });
+ });
+
+ it('should return tooltip string for Project based on `item.visibility` value', () => {
+ Object.keys(PROJECT_VISIBILITY_TYPE).forEach((visibility) => {
+ const item = Object.assign({}, mockParentGroupItem, {
+ visibility,
+ type: ITEM_TYPE.PROJECT,
+ });
+ const vm = createComponent(item);
+ vm.$mount();
+ expect(vm.visibilityTooltip).toBe(PROJECT_VISIBILITY_TYPE[visibility]);
+ vm.$destroy();
+ });
+ });
+ });
+
+ describe('isProject', () => {
+ it('should return boolean value representing whether `item.type` is Project or not', () => {
+ let item;
+ let vm;
+
+ item = Object.assign({}, mockParentGroupItem, { type: ITEM_TYPE.PROJECT });
+ vm = createComponent(item);
+ vm.$mount();
+ expect(vm.isProject).toBeTruthy();
+ vm.$destroy();
+
+ item = Object.assign({}, mockParentGroupItem, { type: ITEM_TYPE.GROUP });
+ vm = createComponent(item);
+ vm.$mount();
+ expect(vm.isProject).toBeFalsy();
+ vm.$destroy();
+ });
+ });
+
+ describe('isGroup', () => {
+ it('should return boolean value representing whether `item.type` is Group or not', () => {
+ let item;
+ let vm;
+
+ item = Object.assign({}, mockParentGroupItem, { type: ITEM_TYPE.GROUP });
+ vm = createComponent(item);
+ vm.$mount();
+ expect(vm.isGroup).toBeTruthy();
+ vm.$destroy();
+
+ item = Object.assign({}, mockParentGroupItem, { type: ITEM_TYPE.PROJECT });
+ vm = createComponent(item);
+ vm.$mount();
+ expect(vm.isGroup).toBeFalsy();
+ vm.$destroy();
+ });
+ });
+ });
+
+ describe('template', () => {
+ it('should render component template correctly', () => {
+ const vm = createComponent();
+ vm.$mount();
+
+ const visibilityIconEl = vm.$el.querySelector('.item-visibility');
+      expect(vm.$el.classList.contains('stats')).toBeDefined();
+ expect(visibilityIconEl).toBeDefined();
+ expect(visibilityIconEl.dataset.originalTitle).toBe(vm.visibilityTooltip);
+ expect(visibilityIconEl.querySelector('i.fa')).toBeDefined();
+
+ vm.$destroy();
+ });
+
+ it('should render stat icons if `item.type` is Group', () => {
+ const item = Object.assign({}, mockParentGroupItem, { type: ITEM_TYPE.GROUP });
+ const vm = createComponent(item);
+ vm.$mount();
+
+ const subgroupIconEl = vm.$el.querySelector('span.number-subgroups');
+ expect(subgroupIconEl).toBeDefined();
+ expect(subgroupIconEl.dataset.originalTitle).toBe('Subgroups');
+ expect(subgroupIconEl.querySelector('i.fa.fa-folder')).toBeDefined();
+ expect(subgroupIconEl.innerText.trim()).toBe(`${vm.item.subgroupCount}`);
+
+ const projectsIconEl = vm.$el.querySelector('span.number-projects');
+ expect(projectsIconEl).toBeDefined();
+ expect(projectsIconEl.dataset.originalTitle).toBe('Projects');
+ expect(projectsIconEl.querySelector('i.fa.fa-bookmark')).toBeDefined();
+ expect(projectsIconEl.innerText.trim()).toBe(`${vm.item.projectCount}`);
+
+ const membersIconEl = vm.$el.querySelector('span.number-users');
+ expect(membersIconEl).toBeDefined();
+ expect(membersIconEl.dataset.originalTitle).toBe('Members');
+ expect(membersIconEl.querySelector('i.fa.fa-users')).toBeDefined();
+ expect(membersIconEl.innerText.trim()).toBe(`${vm.item.memberCount}`);
+
+ vm.$destroy();
+ });
+
+ it('should render stat icons if `item.type` is Project', () => {
+ const item = Object.assign({}, mockParentGroupItem, {
+ type: ITEM_TYPE.PROJECT,
+ starCount: 4,
+ });
+ const vm = createComponent(item);
+ vm.$mount();
+
+ const projectStarIconEl = vm.$el.querySelector('.project-stars');
+ expect(projectStarIconEl).toBeDefined();
+ expect(projectStarIconEl.querySelector('i.fa.fa-star')).toBeDefined();
+ expect(projectStarIconEl.innerText.trim()).toBe(`${vm.item.starCount}`);
+
+ vm.$destroy();
+ });
+ });
+});
diff --git a/spec/javascripts/groups/components/item_type_icon_spec.js b/spec/javascripts/groups/components/item_type_icon_spec.js
new file mode 100644
index 00000000000..528e6ed1b4c
--- /dev/null
+++ b/spec/javascripts/groups/components/item_type_icon_spec.js
@@ -0,0 +1,54 @@
+import Vue from 'vue';
+
+import itemTypeIconComponent from '~/groups/components/item_type_icon.vue';
+import { ITEM_TYPE } from '../mock_data';
+
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+const createComponent = (itemType = ITEM_TYPE.GROUP, isGroupOpen = false) => {
+ const Component = Vue.extend(itemTypeIconComponent);
+
+ return mountComponent(Component, {
+ itemType,
+ isGroupOpen,
+ });
+};
+
+describe('ItemTypeIconComponent', () => {
+ describe('template', () => {
+ it('should render component template correctly', () => {
+ const vm = createComponent();
+ vm.$mount();
+ expect(vm.$el.classList.contains('item-type-icon')).toBeTruthy();
+ vm.$destroy();
+ });
+
+    it('should render folder open or closed icon based on `isGroupOpen` prop value', () => {
+ let vm;
+
+ vm = createComponent(ITEM_TYPE.GROUP, true);
+ vm.$mount();
+ expect(vm.$el.querySelector('i.fa.fa-folder-open')).toBeDefined();
+ vm.$destroy();
+
+ vm = createComponent(ITEM_TYPE.GROUP);
+ vm.$mount();
+ expect(vm.$el.querySelector('i.fa.fa-folder')).toBeDefined();
+ vm.$destroy();
+ });
+
+ it('should render bookmark icon based on `isProject` prop value', () => {
+ let vm;
+
+ vm = createComponent(ITEM_TYPE.PROJECT);
+ vm.$mount();
+ expect(vm.$el.querySelectorAll('i.fa.fa-bookmark').length).toBe(1);
+ vm.$destroy();
+
+ vm = createComponent(ITEM_TYPE.GROUP);
+ vm.$mount();
+ expect(vm.$el.querySelectorAll('i.fa.fa-bookmark').length).toBe(0);
+ vm.$destroy();
+ });
+ });
+});
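The new component specs above mount through the shared vue_mount_component_helper. A minimal sketch of such a helper, assuming it simply instantiates the extended component with the given props and mounts it (the real helper is not part of this diff):

    // spec/javascripts/helpers/vue_mount_component_helper.js (assumed shape)
    export default (Component, props = {}, el = null) =>
      new Component({
        propsData: props, // props are passed straight through as propsData
      }).$mount(el);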
diff --git a/spec/javascripts/groups/group_item_spec.js b/spec/javascripts/groups/group_item_spec.js
deleted file mode 100644
index 25e10552d95..00000000000
--- a/spec/javascripts/groups/group_item_spec.js
+++ /dev/null
@@ -1,102 +0,0 @@
-import Vue from 'vue';
-import groupItemComponent from '~/groups/components/group_item.vue';
-import GroupsStore from '~/groups/stores/groups_store';
-import { group1 } from './mock_data';
-
-describe('Groups Component', () => {
- let GroupItemComponent;
- let component;
- let store;
- let group;
-
- describe('group with default data', () => {
- beforeEach((done) => {
- GroupItemComponent = Vue.extend(groupItemComponent);
- store = new GroupsStore();
- group = store.decorateGroup(group1);
-
- component = new GroupItemComponent({
- propsData: {
- group,
- },
- }).$mount();
-
- Vue.nextTick(() => {
- done();
- });
- });
-
- afterEach(() => {
- component.$destroy();
- });
-
- it('should render the group item correctly', () => {
- expect(component.$el.classList.contains('group-row')).toBe(true);
- expect(component.$el.classList.contains('.no-description')).toBe(false);
- expect(component.$el.querySelector('.number-projects').textContent).toContain(group.numberProjects);
- expect(component.$el.querySelector('.number-users').textContent).toContain(group.numberUsers);
- expect(component.$el.querySelector('.group-visibility')).toBeDefined();
- expect(component.$el.querySelector('.avatar-container')).toBeDefined();
- expect(component.$el.querySelector('.title').textContent).toContain(group.name);
- expect(component.$el.querySelector('.access-type').textContent).toContain(group.permissions.humanGroupAccess);
- expect(component.$el.querySelector('.description').textContent).toContain(group.description);
- expect(component.$el.querySelector('.edit-group')).toBeDefined();
- expect(component.$el.querySelector('.leave-group')).toBeDefined();
- });
- });
-
- describe('group without description', () => {
- beforeEach((done) => {
- GroupItemComponent = Vue.extend(groupItemComponent);
- store = new GroupsStore();
- group1.description = '';
- group = store.decorateGroup(group1);
-
- component = new GroupItemComponent({
- propsData: {
- group,
- },
- }).$mount();
-
- Vue.nextTick(() => {
- done();
- });
- });
-
- afterEach(() => {
- component.$destroy();
- });
-
- it('should render group item correctly', () => {
- expect(component.$el.querySelector('.description').textContent).toBe('');
- expect(component.$el.classList.contains('.no-description')).toBe(false);
- });
- });
-
- describe('user has not access to group', () => {
- beforeEach((done) => {
- GroupItemComponent = Vue.extend(groupItemComponent);
- store = new GroupsStore();
- group1.permissions.human_group_access = null;
- group = store.decorateGroup(group1);
-
- component = new GroupItemComponent({
- propsData: {
- group,
- },
- }).$mount();
-
- Vue.nextTick(() => {
- done();
- });
- });
-
- afterEach(() => {
- component.$destroy();
- });
-
- it('should not display access type', () => {
- expect(component.$el.querySelector('.access-type')).toBeNull();
- });
- });
-});
diff --git a/spec/javascripts/groups/groups_spec.js b/spec/javascripts/groups/groups_spec.js
deleted file mode 100644
index b14153dbbfa..00000000000
--- a/spec/javascripts/groups/groups_spec.js
+++ /dev/null
@@ -1,99 +0,0 @@
-import Vue from 'vue';
-import eventHub from '~/groups/event_hub';
-import groupFolderComponent from '~/groups/components/group_folder.vue';
-import groupItemComponent from '~/groups/components/group_item.vue';
-import groupsComponent from '~/groups/components/groups.vue';
-import GroupsStore from '~/groups/stores/groups_store';
-import { groupsData } from './mock_data';
-
-describe('Groups Component', () => {
- let GroupsComponent;
- let store;
- let component;
- let groups;
-
- beforeEach((done) => {
- Vue.component('group-folder', groupFolderComponent);
- Vue.component('group-item', groupItemComponent);
-
- store = new GroupsStore();
- groups = store.setGroups(groupsData.groups);
-
- store.storePagination(groupsData.pagination);
-
- GroupsComponent = Vue.extend(groupsComponent);
-
- component = new GroupsComponent({
- propsData: {
- groups: store.state.groups,
- pageInfo: store.state.pageInfo,
- },
- }).$mount();
-
- Vue.nextTick(() => {
- done();
- });
- });
-
- afterEach(() => {
- component.$destroy();
- });
-
- describe('with data', () => {
- it('should render a list of groups', () => {
- expect(component.$el.classList.contains('groups-list-tree-container')).toBe(true);
- expect(component.$el.querySelector('#group-12')).toBeDefined();
- expect(component.$el.querySelector('#group-1119')).toBeDefined();
- expect(component.$el.querySelector('#group-1120')).toBeDefined();
- });
-
- it('should respect the order of groups', () => {
- const wrap = component.$el.querySelector('.groups-list-tree-container > .group-list-tree');
- expect(wrap.querySelector('.group-row:nth-child(1)').id).toBe('group-12');
- expect(wrap.querySelector('.group-row:nth-child(2)').id).toBe('group-1119');
- });
-
- it('should render group and its subgroup', () => {
- const lists = component.$el.querySelectorAll('.group-list-tree');
-
- expect(lists.length).toBe(3); // one parent and two subgroups
-
- expect(lists[0].querySelector('#group-1119').classList.contains('is-open')).toBe(true);
- expect(lists[0].querySelector('#group-1119').classList.contains('has-subgroups')).toBe(true);
-
- expect(lists[2].querySelector('#group-1120').textContent).toContain(groups.id1119.subGroups.id1120.name);
- });
-
- it('should render group identicon when group avatar is not present', () => {
- const avatar = component.$el.querySelector('#group-12 .avatar-container .avatar');
- expect(avatar.nodeName).toBe('DIV');
- expect(avatar.classList.contains('identicon')).toBeTruthy();
- expect(avatar.getAttribute('style').indexOf('background-color') > -1).toBeTruthy();
- });
-
- it('should render group avatar when group avatar is present', () => {
- const avatar = component.$el.querySelector('#group-1120 .avatar-container .avatar');
- expect(avatar.nodeName).toBe('IMG');
- expect(avatar.classList.contains('identicon')).toBeFalsy();
- });
-
- it('should remove prefix of parent group', () => {
- expect(component.$el.querySelector('#group-12 #group-1128 .title').textContent).toContain('level2 / level3 / level4');
- });
-
- it('should remove the group after leaving the group', (done) => {
- spyOn(window, 'confirm').and.returnValue(true);
-
- eventHub.$on('leaveGroup', (group, collection) => {
- store.removeGroup(group, collection);
- });
-
- component.$el.querySelector('#group-12 .leave-group').click();
-
- Vue.nextTick(() => {
- expect(component.$el.querySelector('#group-12')).toBeNull();
- done();
- });
- });
- });
-});
diff --git a/spec/javascripts/groups/mock_data.js b/spec/javascripts/groups/mock_data.js
index 5bb84b591f4..6184d671790 100644
--- a/spec/javascripts/groups/mock_data.js
+++ b/spec/javascripts/groups/mock_data.js
@@ -1,114 +1,380 @@
-const group1 = {
- id: 12,
- name: 'level1',
- path: 'level1',
- description: 'foo',
- visibility: 'public',
- avatar_url: null,
- web_url: 'http://localhost:3000/groups/level1',
- group_path: '/level1',
- full_name: 'level1',
- full_path: 'level1',
- parent_id: null,
- created_at: '2017-05-15T19:01:23.670Z',
- updated_at: '2017-05-15T19:01:23.670Z',
- number_projects_with_delimiter: '1',
- number_users_with_delimiter: '1',
- has_subgroups: true,
- permissions: {
- human_group_access: 'Master',
- },
+export const mockEndpoint = '/dashboard/groups.json';
+
+export const ITEM_TYPE = {
+ PROJECT: 'project',
+ GROUP: 'group',
};
-// This group has no direct parent, should be placed as subgroup of group1
-const group14 = {
- id: 1128,
- name: 'level4',
- path: 'level4',
- description: 'foo',
- visibility: 'public',
- avatar_url: null,
- web_url: 'http://localhost:3000/groups/level1/level2/level3/level4',
- group_path: '/level1/level2/level3/level4',
- full_name: 'level1 / level2 / level3 / level4',
- full_path: 'level1/level2/level3/level4',
- parent_id: 1127,
- created_at: '2017-05-15T19:02:01.645Z',
- updated_at: '2017-05-15T19:02:01.645Z',
- number_projects_with_delimiter: '1',
- number_users_with_delimiter: '1',
- has_subgroups: true,
- permissions: {
- human_group_access: 'Master',
- },
+export const GROUP_VISIBILITY_TYPE = {
+ public: 'Public - The group and any public projects can be viewed without any authentication.',
+ internal: 'Internal - The group and any internal projects can be viewed by any logged in user.',
+ private: 'Private - The group and its projects can only be viewed by members.',
};
-const group2 = {
- id: 1119,
- name: 'devops',
- path: 'devops',
- description: 'foo',
- visibility: 'public',
- avatar_url: null,
- web_url: 'http://localhost:3000/groups/devops',
- group_path: '/devops',
- full_name: 'devops',
- full_path: 'devops',
- parent_id: null,
- created_at: '2017-05-11T19:35:09.635Z',
- updated_at: '2017-05-11T19:35:09.635Z',
- number_projects_with_delimiter: '1',
- number_users_with_delimiter: '1',
- has_subgroups: true,
- permissions: {
- human_group_access: 'Master',
- },
+export const PROJECT_VISIBILITY_TYPE = {
+ public: 'Public - The project can be accessed without any authentication.',
+ internal: 'Internal - The project can be accessed by any logged in user.',
+ private: 'Private - Project access must be granted explicitly to each user.',
+};
+
+export const VISIBILITY_TYPE_ICON = {
+ public: 'fa-globe',
+ internal: 'fa-shield',
+ private: 'fa-lock',
};
-const group21 = {
- id: 1120,
- name: 'chef',
- path: 'chef',
- description: 'foo',
+export const mockParentGroupItem = {
+ id: 55,
+ name: 'hardware',
+ description: '',
visibility: 'public',
- avatar_url: '/uploads/-/system/group/avatar/2/GitLab.png',
- web_url: 'http://localhost:3000/groups/devops/chef',
- group_path: '/devops/chef',
- full_name: 'devops / chef',
- full_path: 'devops/chef',
- parent_id: 1119,
- created_at: '2017-05-11T19:51:04.060Z',
- updated_at: '2017-05-11T19:51:04.060Z',
- number_projects_with_delimiter: '1',
- number_users_with_delimiter: '1',
- has_subgroups: true,
- permissions: {
- human_group_access: 'Master',
- },
+ fullName: 'platform / hardware',
+ relativePath: '/platform/hardware',
+ canEdit: true,
+ type: 'group',
+ avatarUrl: null,
+ permission: 'Owner',
+ editPath: '/groups/platform/hardware/edit',
+ childrenCount: 3,
+ leavePath: '/groups/platform/hardware/group_members/leave',
+ parentId: 54,
+ memberCount: '1',
+ projectCount: 1,
+ subgroupCount: 2,
+ canLeave: false,
+ children: [],
+ isOpen: true,
+ isChildrenLoading: false,
+ isBeingRemoved: false,
};
-const groupsData = {
- groups: [group1, group14, group2, group21],
- pagination: {
- Date: 'Mon, 22 May 2017 22:31:52 GMT',
- 'X-Prev-Page': '1',
- 'X-Content-Type-Options': 'nosniff',
- 'X-Total': '31',
- 'Transfer-Encoding': 'chunked',
- 'X-Runtime': '0.611144',
- 'X-Xss-Protection': '1; mode=block',
- 'X-Request-Id': 'f5db8368-3ce5-4aa4-89d2-a125d9dead09',
- 'X-Ua-Compatible': 'IE=edge',
- 'X-Per-Page': '20',
- Link: '<http://localhost:3000/dashboard/groups.json?page=1&per_page=20>; rel="prev", <http://localhost:3000/dashboard/groups.json?page=1&per_page=20>; rel="first", <http://localhost:3000/dashboard/groups.json?page=2&per_page=20>; rel="last"',
- 'X-Next-Page': '',
- Etag: 'W/"a82f846947136271cdb7d55d19ef33d2"',
- 'X-Frame-Options': 'DENY',
- 'Content-Type': 'application/json; charset=utf-8',
- 'Cache-Control': 'max-age=0, private, must-revalidate',
- 'X-Total-Pages': '2',
- 'X-Page': '2',
+export const mockRawChildren = [
+ {
+ id: 57,
+ name: 'bsp',
+ description: '',
+ visibility: 'public',
+ full_name: 'platform / hardware / bsp',
+ relative_path: '/platform/hardware/bsp',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/platform/hardware/bsp/edit',
+ children_count: 6,
+ leave_path: '/groups/platform/hardware/bsp/group_members/leave',
+ parent_id: 55,
+ number_users_with_delimiter: '1',
+ project_count: 4,
+ subgroup_count: 2,
+ can_leave: false,
+ children: [],
+ },
+];
+
+export const mockChildren = [
+ {
+ id: 57,
+ name: 'bsp',
+ description: '',
+ visibility: 'public',
+ fullName: 'platform / hardware / bsp',
+ relativePath: '/platform/hardware/bsp',
+ canEdit: true,
+ type: 'group',
+ avatarUrl: null,
+ permission: 'Owner',
+ editPath: '/groups/platform/hardware/bsp/edit',
+ childrenCount: 6,
+ leavePath: '/groups/platform/hardware/bsp/group_members/leave',
+ parentId: 55,
+ memberCount: '1',
+ projectCount: 4,
+ subgroupCount: 2,
+ canLeave: false,
+ children: [],
+ isOpen: true,
+ isChildrenLoading: false,
+ isBeingRemoved: false,
},
+];
+
+export const mockGroups = [
+ {
+ id: 75,
+ name: 'test-group',
+ description: '',
+ visibility: 'public',
+ full_name: 'test-group',
+ relative_path: '/test-group',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/test-group/edit',
+ children_count: 2,
+ leave_path: '/groups/test-group/group_members/leave',
+ parent_id: null,
+ number_users_with_delimiter: '1',
+ project_count: 2,
+ subgroup_count: 0,
+ can_leave: false,
+ },
+ {
+ id: 67,
+ name: 'open-source',
+ description: '',
+ visibility: 'private',
+ full_name: 'open-source',
+ relative_path: '/open-source',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/open-source/edit',
+ children_count: 0,
+ leave_path: '/groups/open-source/group_members/leave',
+ parent_id: null,
+ number_users_with_delimiter: '1',
+ project_count: 0,
+ subgroup_count: 0,
+ can_leave: false,
+ },
+ {
+ id: 54,
+ name: 'platform',
+ description: '',
+ visibility: 'public',
+ full_name: 'platform',
+ relative_path: '/platform',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/platform/edit',
+ children_count: 1,
+ leave_path: '/groups/platform/group_members/leave',
+ parent_id: null,
+ number_users_with_delimiter: '1',
+ project_count: 0,
+ subgroup_count: 1,
+ can_leave: false,
+ },
+ {
+ id: 5,
+ name: 'H5bp',
+ description: 'Minus dolor consequuntur qui nam recusandae quam incidunt.',
+ visibility: 'public',
+ full_name: 'H5bp',
+ relative_path: '/h5bp',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/h5bp/edit',
+ children_count: 1,
+ leave_path: '/groups/h5bp/group_members/leave',
+ parent_id: null,
+ number_users_with_delimiter: '5',
+ project_count: 1,
+ subgroup_count: 0,
+ can_leave: false,
+ },
+ {
+ id: 4,
+ name: 'Twitter',
+ description: 'Deserunt hic nostrum placeat veniam.',
+ visibility: 'public',
+ full_name: 'Twitter',
+ relative_path: '/twitter',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/twitter/edit',
+ children_count: 2,
+ leave_path: '/groups/twitter/group_members/leave',
+ parent_id: null,
+ number_users_with_delimiter: '5',
+ project_count: 2,
+ subgroup_count: 0,
+ can_leave: false,
+ },
+ {
+ id: 3,
+ name: 'Documentcloud',
+ description: 'Consequatur saepe totam ea pariatur maxime.',
+ visibility: 'public',
+ full_name: 'Documentcloud',
+ relative_path: '/documentcloud',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/documentcloud/edit',
+ children_count: 1,
+ leave_path: '/groups/documentcloud/group_members/leave',
+ parent_id: null,
+ number_users_with_delimiter: '5',
+ project_count: 1,
+ subgroup_count: 0,
+ can_leave: false,
+ },
+ {
+ id: 2,
+ name: 'Gitlab Org',
+ description: 'Debitis ea quas aperiam velit doloremque ab.',
+ visibility: 'public',
+ full_name: 'Gitlab Org',
+ relative_path: '/gitlab-org',
+ can_edit: true,
+ type: 'group',
+ avatar_url: '/uploads/-/system/group/avatar/2/GitLab.png',
+ permission: 'Owner',
+ edit_path: '/groups/gitlab-org/edit',
+ children_count: 4,
+ leave_path: '/groups/gitlab-org/group_members/leave',
+ parent_id: null,
+ number_users_with_delimiter: '5',
+ project_count: 4,
+ subgroup_count: 0,
+ can_leave: false,
+ },
+];
+
+export const mockSearchedGroups = [
+ {
+ id: 55,
+ name: 'hardware',
+ description: '',
+ visibility: 'public',
+ full_name: 'platform / hardware',
+ relative_path: '/platform/hardware',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/platform/hardware/edit',
+ children_count: 3,
+ leave_path: '/groups/platform/hardware/group_members/leave',
+ parent_id: 54,
+ number_users_with_delimiter: '1',
+ project_count: 1,
+ subgroup_count: 2,
+ can_leave: false,
+ children: [
+ {
+ id: 57,
+ name: 'bsp',
+ description: '',
+ visibility: 'public',
+ full_name: 'platform / hardware / bsp',
+ relative_path: '/platform/hardware/bsp',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/platform/hardware/bsp/edit',
+ children_count: 6,
+ leave_path: '/groups/platform/hardware/bsp/group_members/leave',
+ parent_id: 55,
+ number_users_with_delimiter: '1',
+ project_count: 4,
+ subgroup_count: 2,
+ can_leave: false,
+ children: [
+ {
+ id: 60,
+ name: 'kernel',
+ description: '',
+ visibility: 'public',
+ full_name: 'platform / hardware / bsp / kernel',
+ relative_path: '/platform/hardware/bsp/kernel',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/platform/hardware/bsp/kernel/edit',
+ children_count: 1,
+ leave_path: '/groups/platform/hardware/bsp/kernel/group_members/leave',
+ parent_id: 57,
+ number_users_with_delimiter: '1',
+ project_count: 0,
+ subgroup_count: 1,
+ can_leave: false,
+ children: [
+ {
+ id: 61,
+ name: 'common',
+ description: '',
+ visibility: 'public',
+ full_name: 'platform / hardware / bsp / kernel / common',
+ relative_path: '/platform/hardware/bsp/kernel/common',
+ can_edit: true,
+ type: 'group',
+ avatar_url: null,
+ permission: 'Owner',
+ edit_path: '/groups/platform/hardware/bsp/kernel/common/edit',
+ children_count: 2,
+ leave_path: '/groups/platform/hardware/bsp/kernel/common/group_members/leave',
+ parent_id: 60,
+ number_users_with_delimiter: '1',
+ project_count: 2,
+ subgroup_count: 0,
+ can_leave: false,
+ children: [
+ {
+ id: 17,
+ name: 'v4.4',
+ description: 'Voluptatem qui ea error aperiam veritatis doloremque consequatur temporibus.',
+ visibility: 'public',
+ full_name: 'platform / hardware / bsp / kernel / common / v4.4',
+ relative_path: '/platform/hardware/bsp/kernel/common/v4.4',
+ can_edit: true,
+ type: 'project',
+ avatar_url: null,
+ permission: null,
+ edit_path: '/platform/hardware/bsp/kernel/common/v4.4/edit',
+ star_count: 0,
+ },
+ {
+ id: 16,
+ name: 'v4.1',
+ description: 'Rerum expedita voluptatem doloribus neque ducimus ut hic.',
+ visibility: 'public',
+ full_name: 'platform / hardware / bsp / kernel / common / v4.1',
+ relative_path: '/platform/hardware/bsp/kernel/common/v4.1',
+ can_edit: true,
+ type: 'project',
+ avatar_url: null,
+ permission: null,
+ edit_path: '/platform/hardware/bsp/kernel/common/v4.1/edit',
+ star_count: 0,
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ },
+];
+
+export const mockRawPageInfo = {
+ 'x-per-page': 10,
+ 'x-page': 10,
+ 'x-total': 10,
+ 'x-total-pages': 10,
+ 'x-next-page': 10,
+ 'x-prev-page': 10,
};
-export { groupsData, group1 };
+export const mockPageInfo = {
+ perPage: 10,
+ page: 10,
+ total: 10,
+ totalPages: 10,
+ nextPage: 10,
+ prevPage: 10,
+};
diff --git a/spec/javascripts/groups/service/groups_service_spec.js b/spec/javascripts/groups/service/groups_service_spec.js
new file mode 100644
index 00000000000..20bb63687f7
--- /dev/null
+++ b/spec/javascripts/groups/service/groups_service_spec.js
@@ -0,0 +1,42 @@
+import Vue from 'vue';
+import VueResource from 'vue-resource';
+
+import GroupsService from '~/groups/service/groups_service';
+import { mockEndpoint, mockParentGroupItem } from '../mock_data';
+
+Vue.use(VueResource);
+
+describe('GroupsService', () => {
+ let service;
+
+ beforeEach(() => {
+ service = new GroupsService(mockEndpoint);
+ });
+
+ describe('getGroups', () => {
+ it('should return promise for `GET` request on provided endpoint', () => {
+ spyOn(service.groups, 'get').and.stub();
+ const queryParams = {
+ page: 2,
+ filter: 'git',
+ sort: 'created_asc',
+ archived: true,
+ };
+
+ service.getGroups(55, 2, 'git', 'created_asc', true);
+ expect(service.groups.get).toHaveBeenCalledWith({ parent_id: 55 });
+
+ service.getGroups(null, 2, 'git', 'created_asc', true);
+ expect(service.groups.get).toHaveBeenCalledWith(queryParams);
+ });
+ });
+
+ describe('leaveGroup', () => {
+ it('should return promise for `DELETE` request on provided endpoint', () => {
+ spyOn(Vue.http, 'delete').and.stub();
+
+ service.leaveGroup(mockParentGroupItem.leavePath);
+ expect(Vue.http.delete).toHaveBeenCalledWith(mockParentGroupItem.leavePath);
+ });
+ });
+});
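A rough sketch of a GroupsService built on vue-resource that would satisfy these expectations; the real implementation under ~/groups/service/groups_service.js is not shown in this diff, so the parameter handling below is inferred from the spec:

    import Vue from 'vue';
    import VueResource from 'vue-resource';

    Vue.use(VueResource);

    export default class GroupsService {
      constructor(endpoint) {
        // One resource per endpoint, e.g. '/dashboard/groups.json'
        this.groups = Vue.resource(endpoint);
      }

      getGroups(parentId, page, filterGroups, sort, archived) {
        const params = {};

        if (parentId) {
          // Children of a group are fetched by parent id only
          params.parent_id = parentId;
        } else {
          // Top-level listing supports paging, search, sort and archive filter
          if (page) params.page = page;
          if (filterGroups) params.filter = filterGroups;
          if (sort) params.sort = sort;
          if (archived) params.archived = archived;
        }

        return this.groups.get(params);
      }

      leaveGroup(endpoint) {
        return Vue.http.delete(endpoint);
      }
    }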
diff --git a/spec/javascripts/groups/store/groups_store_spec.js b/spec/javascripts/groups/store/groups_store_spec.js
new file mode 100644
index 00000000000..d74f38f476e
--- /dev/null
+++ b/spec/javascripts/groups/store/groups_store_spec.js
@@ -0,0 +1,110 @@
+import GroupsStore from '~/groups/store/groups_store';
+import {
+ mockGroups, mockSearchedGroups,
+ mockParentGroupItem, mockRawChildren,
+ mockRawPageInfo,
+} from '../mock_data';
+
+describe('GroupsStore', () => {
+ describe('constructor', () => {
+ it('should initialize default state', () => {
+ let store;
+
+ store = new GroupsStore();
+ expect(Object.keys(store.state).length).toBe(2);
+ expect(Array.isArray(store.state.groups)).toBeTruthy();
+ expect(Object.keys(store.state.pageInfo).length).toBe(0);
+ expect(store.hideProjects).not.toBeDefined();
+
+ store = new GroupsStore(true);
+ expect(store.hideProjects).toBeTruthy();
+ });
+ });
+
+ describe('setGroups', () => {
+ it('should set groups to state', () => {
+ const store = new GroupsStore();
+ spyOn(store, 'formatGroupItem').and.callThrough();
+
+ store.setGroups(mockGroups);
+ expect(store.state.groups.length).toBe(mockGroups.length);
+ expect(store.formatGroupItem).toHaveBeenCalledWith(jasmine.any(Object));
+ expect(Object.keys(store.state.groups[0]).indexOf('fullName') > -1).toBeTruthy();
+ });
+ });
+
+ describe('setSearchedGroups', () => {
+ it('should set searched groups to state', () => {
+ const store = new GroupsStore();
+ spyOn(store, 'formatGroupItem').and.callThrough();
+
+ store.setSearchedGroups(mockSearchedGroups);
+ expect(store.state.groups.length).toBe(mockSearchedGroups.length);
+ expect(store.formatGroupItem).toHaveBeenCalledWith(jasmine.any(Object));
+ expect(Object.keys(store.state.groups[0]).indexOf('fullName') > -1).toBeTruthy();
+ expect(Object.keys(store.state.groups[0].children[0]).indexOf('fullName') > -1).toBeTruthy();
+ });
+ });
+
+ describe('setGroupChildren', () => {
+ it('should set children to group item in state', () => {
+ const store = new GroupsStore();
+ spyOn(store, 'formatGroupItem').and.callThrough();
+
+ store.setGroupChildren(mockParentGroupItem, mockRawChildren);
+ expect(store.formatGroupItem).toHaveBeenCalledWith(jasmine.any(Object));
+ expect(mockParentGroupItem.children.length).toBe(1);
+ expect(Object.keys(mockParentGroupItem.children[0]).indexOf('fullName') > -1).toBeTruthy();
+ expect(mockParentGroupItem.isOpen).toBeTruthy();
+ expect(mockParentGroupItem.isChildrenLoading).toBeFalsy();
+ });
+ });
+
+ describe('setPaginationInfo', () => {
+ it('should parse and set pagination info in state', () => {
+ const store = new GroupsStore();
+
+ store.setPaginationInfo(mockRawPageInfo);
+ expect(store.state.pageInfo.perPage).toBe(10);
+ expect(store.state.pageInfo.page).toBe(10);
+ expect(store.state.pageInfo.total).toBe(10);
+ expect(store.state.pageInfo.totalPages).toBe(10);
+ expect(store.state.pageInfo.nextPage).toBe(10);
+ expect(store.state.pageInfo.previousPage).toBe(10);
+ });
+ });
+
+ describe('formatGroupItem', () => {
+ it('should parse group item object and return updated object', () => {
+ let store;
+ let updatedGroupItem;
+
+ store = new GroupsStore();
+ updatedGroupItem = store.formatGroupItem(mockRawChildren[0]);
+ expect(Object.keys(updatedGroupItem).indexOf('fullName') > -1).toBeTruthy();
+ expect(updatedGroupItem.childrenCount).toBe(mockRawChildren[0].children_count);
+ expect(updatedGroupItem.isChildrenLoading).toBe(false);
+ expect(updatedGroupItem.isBeingRemoved).toBe(false);
+
+ store = new GroupsStore(true);
+ updatedGroupItem = store.formatGroupItem(mockRawChildren[0]);
+ expect(Object.keys(updatedGroupItem).indexOf('fullName') > -1).toBeTruthy();
+ expect(updatedGroupItem.childrenCount).toBe(mockRawChildren[0].subgroup_count);
+ });
+ });
+
+ describe('removeGroup', () => {
+ it('should remove children from group item in state', () => {
+ const store = new GroupsStore();
+ const rawParentGroup = Object.assign({}, mockGroups[0]);
+ const rawChildGroup = Object.assign({}, mockGroups[1]);
+
+ store.setGroups([rawParentGroup]);
+ store.setGroupChildren(store.state.groups[0], [rawChildGroup]);
+ const childItem = store.state.groups[0].children[0];
+
+ store.removeGroup(childItem, store.state.groups[0]);
+ expect(store.state.groups[0].children.length).toBe(0);
+ });
+ });
+});
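The store assertions above pin down the shape formatGroupItem must produce. A sketch of that mapping, with field names inferred from the spec and mock data rather than copied from the real store:

    // Raw snake_case attributes from the API become the camelCase shape the
    // tree components read, plus a few client-only UI flags.
    formatGroupItem(rawGroupItem) {
      return {
        id: rawGroupItem.id,
        name: rawGroupItem.name,
        fullName: rawGroupItem.full_name,
        visibility: rawGroupItem.visibility,
        type: rawGroupItem.type,
        memberCount: rawGroupItem.number_users_with_delimiter,
        projectCount: rawGroupItem.project_count,
        subgroupCount: rawGroupItem.subgroup_count,
        // When projects are hidden, only subgroups count as children
        childrenCount: this.hideProjects
          ? rawGroupItem.subgroup_count
          : rawGroupItem.children_count,
        children: [],
        isOpen: false,
        isChildrenLoading: false,
        isBeingRemoved: false,
      };
    }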
diff --git a/spec/javascripts/helpers/set_timeout_promise_helper.js b/spec/javascripts/helpers/set_timeout_promise_helper.js
new file mode 100644
index 00000000000..1478073413c
--- /dev/null
+++ b/spec/javascripts/helpers/set_timeout_promise_helper.js
@@ -0,0 +1,3 @@
+export default (time = 0) => new Promise((resolve) => {
+ setTimeout(resolve, time);
+});
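The helper above resolves a promise after a setTimeout, which lets specs wait out queued timers without nesting callbacks. Example usage in a Jasmine spec, mirroring how repo_commit_section_spec uses it further down in this diff (the somethingAsync call is a placeholder):

    import getSetTimeoutPromise from '../../helpers/set_timeout_promise_helper';

    it('waits for queued timers before asserting', (done) => {
      getSetTimeoutPromise()          // resolves on the next setTimeout tick
        .then(() => {
          expect(somethingAsync()).toBe(true); // hypothetical assertion
        })
        .then(done)
        .catch(done.fail);
    });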
diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js
index 583a3a74d77..2ea290108a4 100644
--- a/spec/javascripts/issue_show/components/app_spec.js
+++ b/spec/javascripts/issue_show/components/app_spec.js
@@ -332,4 +332,15 @@ describe('Issuable output', () => {
.catch(done.fail);
});
});
+
+ describe('show inline edit button', () => {
+ it('should not render by default', () => {
+      expect(vm.$el.querySelector('.title-container .note-action-button')).toBeNull();
+ });
+
+ it('should render if showInlineEditButton', () => {
+ vm.showInlineEditButton = true;
+ expect(vm.$el.querySelector('.title-container .note-action-button')).toBeDefined();
+ });
+ });
});
diff --git a/spec/javascripts/issue_show/components/title_spec.js b/spec/javascripts/issue_show/components/title_spec.js
index a2d90a9b9f5..c1edc785d0f 100644
--- a/spec/javascripts/issue_show/components/title_spec.js
+++ b/spec/javascripts/issue_show/components/title_spec.js
@@ -1,6 +1,7 @@
import Vue from 'vue';
import Store from '~/issue_show/stores';
import titleComponent from '~/issue_show/components/title.vue';
+import eventHub from '~/issue_show/event_hub';
describe('Title component', () => {
let vm;
@@ -25,7 +26,7 @@ describe('Title component', () => {
it('renders title HTML', () => {
expect(
- vm.$el.innerHTML.trim(),
+ vm.$el.querySelector('.title').innerHTML.trim(),
).toBe('Testing <img>');
});
@@ -47,12 +48,12 @@ describe('Title component', () => {
Vue.nextTick(() => {
expect(
- vm.$el.classList.contains('issue-realtime-pre-pulse'),
+ vm.$el.querySelector('.title').classList.contains('issue-realtime-pre-pulse'),
).toBeTruthy();
setTimeout(() => {
expect(
- vm.$el.classList.contains('issue-realtime-trigger-pulse'),
+ vm.$el.querySelector('.title').classList.contains('issue-realtime-trigger-pulse'),
).toBeTruthy();
done();
@@ -72,4 +73,36 @@ describe('Title component', () => {
done();
});
});
+
+ describe('inline edit button', () => {
+ beforeEach(() => {
+ spyOn(eventHub, '$emit');
+ });
+
+ it('should not show by default', () => {
+ expect(vm.$el.querySelector('.note-action-button')).toBeNull();
+ });
+
+ it('should not show if canUpdate is false', () => {
+ vm.showInlineEditButton = true;
+ vm.canUpdate = false;
+ expect(vm.$el.querySelector('.note-action-button')).toBeNull();
+ });
+
+ it('should show if showInlineEditButton and canUpdate', () => {
+ vm.showInlineEditButton = true;
+ vm.canUpdate = true;
+ expect(vm.$el.querySelector('.note-action-button')).toBeDefined();
+ });
+
+    it('should trigger open.form event when clicked', (done) => {
+      vm.showInlineEditButton = true;
+      vm.canUpdate = true;
+
+      Vue.nextTick(() => {
+        vm.$el.querySelector('.note-action-button').click();
+        expect(eventHub.$emit).toHaveBeenCalledWith('open.form');
+        done();
+      });
+    });
+ });
});
diff --git a/spec/javascripts/jobs/header_spec.js b/spec/javascripts/jobs/header_spec.js
index c7179b3e03d..4a210faa017 100644
--- a/spec/javascripts/jobs/header_spec.js
+++ b/spec/javascripts/jobs/header_spec.js
@@ -30,7 +30,6 @@ describe('Job details header', () => {
email: 'foo@bar.com',
avatar_url: 'link',
},
- retry_path: 'path',
new_issue_path: 'path',
},
isLoading: false,
@@ -49,12 +48,6 @@ describe('Job details header', () => {
).toEqual('failed Job #123 triggered 3 weeks ago by Foo');
});
- it('should render retry link', () => {
- expect(
- vm.$el.querySelector('.js-retry-button').getAttribute('href'),
- ).toEqual(props.job.retry_path);
- });
-
it('should render new issue link', () => {
expect(
vm.$el.querySelector('.js-new-issue').getAttribute('href'),
diff --git a/spec/javascripts/lib/utils/common_utils_spec.js b/spec/javascripts/lib/utils/common_utils_spec.js
index f86f2f260c3..a5298be5669 100644
--- a/spec/javascripts/lib/utils/common_utils_spec.js
+++ b/spec/javascripts/lib/utils/common_utils_spec.js
@@ -467,15 +467,27 @@ describe('common_utils', () => {
commonUtils.ajaxPost(requestURL, data);
expect(ajaxSpy.calls.allArgs()[0][0].type).toEqual('POST');
});
+ });
- describe('gl.utils.spriteIcon', () => {
- beforeEach(() => {
- window.gon.sprite_icons = 'icons.svg';
- });
+ describe('spriteIcon', () => {
+ let beforeGon;
- it('should return the svg for a linked icon', () => {
- expect(gl.utils.spriteIcon('test')).toEqual('<svg><use xlink:href="icons.svg#test" /></svg>');
- });
+ beforeEach(() => {
+ window.gon = window.gon || {};
+ beforeGon = Object.assign({}, window.gon);
+ window.gon.sprite_icons = 'icons.svg';
+ });
+
+ afterEach(() => {
+ window.gon = beforeGon;
+ });
+
+ it('should return the svg for a linked icon', () => {
+ expect(commonUtils.spriteIcon('test')).toEqual('<svg ><use xlink:href="icons.svg#test" /></svg>');
+ });
+
+ it('should set svg className when passed', () => {
+ expect(commonUtils.spriteIcon('test', 'fa fa-test')).toEqual('<svg class="fa fa-test"><use xlink:href="icons.svg#test" /></svg>');
});
});
});
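A spriteIcon helper consistent with the two expectations above would look roughly like this; it is inferred from the expected strings (including the stray space after 'svg' when no class is passed), not copied from ~/lib/utils/common_utils:

    export function spriteIcon(icon, className = '') {
      const classAttribute = className.length > 0 ? `class="${className}"` : '';

      // window.gon.sprite_icons points at the SVG sprite, e.g. 'icons.svg'
      return `<svg ${classAttribute}><use xlink:href="${window.gon.sprite_icons}#${icon}" /></svg>`;
    }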
diff --git a/spec/javascripts/lib/utils/datefix_spec.js b/spec/javascripts/lib/utils/datefix_spec.js
new file mode 100644
index 00000000000..0b9fde2be67
--- /dev/null
+++ b/spec/javascripts/lib/utils/datefix_spec.js
@@ -0,0 +1,29 @@
+import { pad, parsePikadayDate, pikadayToString } from '~/lib/utils/datefix';
+
+describe('datefix', () => {
+ describe('pad', () => {
+ it('should add a 0 when length is smaller than 2', () => {
+ expect(pad(2)).toEqual('02');
+ });
+
+  it('should not add a zero when length matches the default', () => {
+ expect(pad(12)).toEqual('12');
+ });
+
+  it('should add a 0 when length is smaller than the provided length', () => {
+ expect(pad(12, 3)).toEqual('012');
+ });
+ });
+
+ describe('parsePikadayDate', () => {
+ it('should return a UTC date', () => {
+ expect(parsePikadayDate('2020-01-29')).toEqual(new Date('2020-01-29'));
+ });
+ });
+
+ describe('pikadayToString', () => {
+ it('should format a UTC date into yyyy-mm-dd format', () => {
+ expect(pikadayToString(new Date('2020-01-29'))).toEqual('2020-01-29');
+ });
+ });
+});
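One possible shape for the ~/lib/utils/datefix module that satisfies the spec above; the implementations are inferred from the expectations and may differ from the real module:

    export const pad = (val, len = 2) => `${'0'.repeat(len)}${val}`.slice(-len);

    // Pikaday hands back 'yyyy-mm-dd' strings; parsing them directly yields a
    // UTC-midnight Date, which is what the spec compares against.
    export const parsePikadayDate = dateString => new Date(dateString);

    export const pikadayToString = date =>
      `${date.getUTCFullYear()}-${pad(date.getUTCMonth() + 1)}-${pad(date.getUTCDate())}`;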
diff --git a/spec/javascripts/registry/mock_data.js b/spec/javascripts/registry/mock_data.js
index 18600d00bff..6bffb47be55 100644
--- a/spec/javascripts/registry/mock_data.js
+++ b/spec/javascripts/registry/mock_data.js
@@ -26,7 +26,7 @@ export const registryServerResponse = [
name: 'centos7',
short_revision: 'b118ab5b0',
revision: 'b118ab5b0e90b7cb5127db31d5321ac14961d097516a8e0e72084b6cdc783b43',
- size: 679,
+ total_size: 679,
layers: 19,
location: 'location',
created_at: 1505828744434,
@@ -36,7 +36,7 @@ export const registryServerResponse = [
name: 'centos6',
short_revision: 'b118ab5b0',
revision: 'b118ab5b0e90b7cb5127db31d5321ac14961d097516a8e0e72084b6cdc783b43',
- size: 679,
+ total_size: 679,
layers: 19,
location: 'location',
created_at: 1505828744434,
@@ -70,7 +70,7 @@ export const parsedRegistryServerResponse = [
tag: registryServerResponse[0].name,
revision: registryServerResponse[0].revision,
shortRevision: registryServerResponse[0].short_revision,
- size: registryServerResponse[0].size,
+ size: registryServerResponse[0].total_size,
layers: registryServerResponse[0].layers,
location: registryServerResponse[0].location,
createdAt: registryServerResponse[0].created_at,
@@ -81,7 +81,7 @@ export const parsedRegistryServerResponse = [
tag: registryServerResponse[1].name,
revision: registryServerResponse[1].revision,
shortRevision: registryServerResponse[1].short_revision,
- size: registryServerResponse[1].size,
+ size: registryServerResponse[1].total_size,
layers: registryServerResponse[1].layers,
location: registryServerResponse[1].location,
createdAt: registryServerResponse[1].created_at,
diff --git a/spec/javascripts/repo/components/repo_commit_section_spec.js b/spec/javascripts/repo/components/repo_commit_section_spec.js
index e604dcc152d..e09d593f04c 100644
--- a/spec/javascripts/repo/components/repo_commit_section_spec.js
+++ b/spec/javascripts/repo/components/repo_commit_section_spec.js
@@ -2,29 +2,13 @@ import Vue from 'vue';
import repoCommitSection from '~/repo/components/repo_commit_section.vue';
import RepoStore from '~/repo/stores/repo_store';
import RepoService from '~/repo/services/repo_service';
+import getSetTimeoutPromise from '../../helpers/set_timeout_promise_helper';
describe('RepoCommitSection', () => {
const branch = 'master';
const projectUrl = 'projectUrl';
- const changedFiles = [{
- id: 0,
- changed: true,
- url: `/namespace/${projectUrl}/blob/${branch}/dir/file0.ext`,
- path: 'dir/file0.ext',
- newContent: 'a',
- }, {
- id: 1,
- changed: true,
- url: `/namespace/${projectUrl}/blob/${branch}/dir/file1.ext`,
- path: 'dir/file1.ext',
- newContent: 'b',
- }];
- const openedFiles = changedFiles.concat([{
- id: 2,
- url: `/namespace/${projectUrl}/blob/${branch}/dir/file2.ext`,
- path: 'dir/file2.ext',
- changed: false,
- }]);
+ let changedFiles;
+ let openedFiles;
RepoStore.projectUrl = projectUrl;
@@ -34,6 +18,29 @@ describe('RepoCommitSection', () => {
return new RepoCommitSection().$mount(el);
}
+ beforeEach(() => {
+ // Create a copy for each test because these can get modified directly
+ changedFiles = [{
+ id: 0,
+ changed: true,
+ url: `/namespace/${projectUrl}/blob/${branch}/dir/file0.ext`,
+ path: 'dir/file0.ext',
+ newContent: 'a',
+ }, {
+ id: 1,
+ changed: true,
+ url: `/namespace/${projectUrl}/blob/${branch}/dir/file1.ext`,
+ path: 'dir/file1.ext',
+ newContent: 'b',
+ }];
+ openedFiles = changedFiles.concat([{
+ id: 2,
+ url: `/namespace/${projectUrl}/blob/${branch}/dir/file2.ext`,
+ path: 'dir/file2.ext',
+ changed: false,
+ }]);
+ });
+
it('renders a commit section', () => {
RepoStore.isCommitable = true;
RepoStore.currentBranch = branch;
@@ -85,55 +92,105 @@ describe('RepoCommitSection', () => {
expect(vm.$el.innerHTML).toBeFalsy();
});
- it('shows commit submit and summary if commitMessage and spinner if submitCommitsLoading', (done) => {
+ describe('when submitting', () => {
+ let el;
+ let vm;
const projectId = 'projectId';
const commitMessage = 'commitMessage';
- RepoStore.isCommitable = true;
- RepoStore.currentBranch = branch;
- RepoStore.targetBranch = branch;
- RepoStore.openedFiles = openedFiles;
- RepoStore.projectId = projectId;
- // We need to append to body to get form `submit` events working
- // Otherwise we run into, "Form submission canceled because the form is not connected"
- // See https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#form-submission-algorithm
- const el = document.createElement('div');
- document.body.appendChild(el);
-
- const vm = createComponent(el);
- const commitMessageEl = vm.$el.querySelector('#commit-message');
- const submitCommit = vm.$refs.submitCommit;
+ beforeEach((done) => {
+ RepoStore.isCommitable = true;
+ RepoStore.currentBranch = branch;
+ RepoStore.targetBranch = branch;
+ RepoStore.openedFiles = openedFiles;
+ RepoStore.projectId = projectId;
+
+ // We need to append to body to get form `submit` events working
+      // Otherwise we run into "Form submission canceled because the form is not connected"
+ // See https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#form-submission-algorithm
+ el = document.createElement('div');
+ document.body.appendChild(el);
+
+ vm = createComponent(el);
+ vm.commitMessage = commitMessage;
+
+ spyOn(vm, 'tryCommit').and.callThrough();
+ spyOn(vm, 'redirectToNewMr').and.stub();
+ spyOn(vm, 'redirectToBranch').and.stub();
+ spyOn(RepoService, 'commitFiles').and.returnValue(Promise.resolve());
+ spyOn(RepoService, 'getBranch').and.returnValue(Promise.resolve({
+ commit: {
+ id: 1,
+ short_id: 1,
+ },
+ }));
+
+ // Wait for the vm data to be in place
+ Vue.nextTick(() => {
+ done();
+ });
+ });
- vm.commitMessage = commitMessage;
+ afterEach(() => {
+ vm.$destroy();
+ el.remove();
+ RepoStore.openedFiles = [];
+ });
- Vue.nextTick(() => {
+ it('shows commit message', () => {
+ const commitMessageEl = vm.$el.querySelector('#commit-message');
expect(commitMessageEl.value).toBe(commitMessage);
- expect(submitCommit.disabled).toBeFalsy();
+ });
- spyOn(vm, 'makeCommit').and.callThrough();
- spyOn(RepoService, 'commitFiles').and.callFake(() => Promise.resolve());
+ it('allows you to submit', () => {
+ const submitCommit = vm.$refs.submitCommit;
+ expect(submitCommit.disabled).toBeFalsy();
+ });
+      it('commits the changed files and shows a loading icon when the commit is submitted', (done) => {
+ const submitCommit = vm.$refs.submitCommit;
submitCommit.click();
- Vue.nextTick(() => {
- expect(vm.makeCommit).toHaveBeenCalled();
- expect(submitCommit.querySelector('.fa-spinner.fa-spin')).toBeTruthy();
-
- const args = RepoService.commitFiles.calls.allArgs()[0];
- const { commit_message, actions, branch: payloadBranch } = args[0];
-
- expect(commit_message).toBe(commitMessage);
- expect(actions.length).toEqual(2);
- expect(payloadBranch).toEqual(branch);
- expect(actions[0].action).toEqual('update');
- expect(actions[1].action).toEqual('update');
- expect(actions[0].content).toEqual(openedFiles[0].newContent);
- expect(actions[1].content).toEqual(openedFiles[1].newContent);
- expect(actions[0].file_path).toEqual(openedFiles[0].path);
- expect(actions[1].file_path).toEqual(openedFiles[1].path);
+ // Wait for the branch check to finish
+ getSetTimeoutPromise()
+ .then(() => Vue.nextTick())
+ .then(() => {
+ expect(vm.tryCommit).toHaveBeenCalled();
+ expect(submitCommit.querySelector('.js-commit-loading-icon')).toBeTruthy();
+ expect(vm.redirectToBranch).toHaveBeenCalled();
+
+ const args = RepoService.commitFiles.calls.allArgs()[0];
+ const { commit_message, actions, branch: payloadBranch } = args[0];
+
+ expect(commit_message).toBe(commitMessage);
+ expect(actions.length).toEqual(2);
+ expect(payloadBranch).toEqual(branch);
+ expect(actions[0].action).toEqual('update');
+ expect(actions[1].action).toEqual('update');
+ expect(actions[0].content).toEqual(openedFiles[0].newContent);
+ expect(actions[1].content).toEqual(openedFiles[1].newContent);
+ expect(actions[0].file_path).toEqual(openedFiles[0].path);
+ expect(actions[1].file_path).toEqual(openedFiles[1].path);
+ })
+ .then(done)
+ .catch(done.fail);
+ });
- done();
- });
+ it('redirects to MR creation page if start new MR checkbox checked', (done) => {
+ vm.startNewMR = true;
+
+ Vue.nextTick()
+ .then(() => {
+ const submitCommit = vm.$refs.submitCommit;
+ submitCommit.click();
+ })
+ // Wait for the branch check to finish
+ .then(() => getSetTimeoutPromise())
+ .then(() => {
+ expect(vm.redirectToNewMr).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
});
});
@@ -143,6 +200,7 @@ describe('RepoCommitSection', () => {
const vm = {
submitCommitsLoading: true,
changedFiles: new Array(10),
+ openedFiles: new Array(3),
commitMessage: 'commitMessage',
editMode: true,
};
diff --git a/spec/javascripts/repo/components/repo_edit_button_spec.js b/spec/javascripts/repo/components/repo_edit_button_spec.js
index 411514009dc..dff2fac191d 100644
--- a/spec/javascripts/repo/components/repo_edit_button_spec.js
+++ b/spec/javascripts/repo/components/repo_edit_button_spec.js
@@ -9,6 +9,10 @@ describe('RepoEditButton', () => {
return new RepoEditButton().$mount();
}
+ afterEach(() => {
+ RepoStore.openedFiles = [];
+ });
+
it('renders an edit button that toggles the view state', (done) => {
RepoStore.isCommitable = true;
RepoStore.changedFiles = [];
@@ -38,12 +42,4 @@ describe('RepoEditButton', () => {
expect(vm.$el.innerHTML).toBeUndefined();
});
-
- describe('methods', () => {
- describe('editCancelClicked', () => {
- it('sets dialog to open when there are changedFiles');
-
- it('toggles editMode and calls toggleBlobView');
- });
- });
});
diff --git a/spec/javascripts/repo/components/repo_editor_spec.js b/spec/javascripts/repo/components/repo_editor_spec.js
index 85d55d171f9..a25a600b3be 100644
--- a/spec/javascripts/repo/components/repo_editor_spec.js
+++ b/spec/javascripts/repo/components/repo_editor_spec.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+import RepoStore from '~/repo/stores/repo_store';
import repoEditor from '~/repo/components/repo_editor.vue';
describe('RepoEditor', () => {
@@ -8,6 +9,10 @@ describe('RepoEditor', () => {
this.vm = new RepoEditor().$mount();
});
+ afterEach(() => {
+ RepoStore.openedFiles = [];
+ });
+
it('renders an ide container', (done) => {
this.vm.openedFiles = ['idiidid'];
this.vm.binary = false;
diff --git a/spec/javascripts/repo/components/repo_file_buttons_spec.js b/spec/javascripts/repo/components/repo_file_buttons_spec.js
index dfab51710c3..701c260224f 100644
--- a/spec/javascripts/repo/components/repo_file_buttons_spec.js
+++ b/spec/javascripts/repo/components/repo_file_buttons_spec.js
@@ -9,6 +9,10 @@ describe('RepoFileButtons', () => {
return new RepoFileButtons().$mount();
}
+ afterEach(() => {
+ RepoStore.openedFiles = [];
+ });
+
it('renders Raw, Blame, History, Permalink and Preview toggle', () => {
const activeFile = {
extension: 'md',
diff --git a/spec/javascripts/repo/components/repo_file_options_spec.js b/spec/javascripts/repo/components/repo_file_options_spec.js
deleted file mode 100644
index 9759b4bf12d..00000000000
--- a/spec/javascripts/repo/components/repo_file_options_spec.js
+++ /dev/null
@@ -1,33 +0,0 @@
-import Vue from 'vue';
-import repoFileOptions from '~/repo/components/repo_file_options.vue';
-
-describe('RepoFileOptions', () => {
- const projectName = 'projectName';
-
- function createComponent(propsData) {
- const RepoFileOptions = Vue.extend(repoFileOptions);
-
- return new RepoFileOptions({
- propsData,
- }).$mount();
- }
-
- it('renders the title and new file/folder buttons if isMini is true', () => {
- const vm = createComponent({
- isMini: true,
- projectName,
- });
-
- expect(vm.$el.classList.contains('repo-file-options')).toBeTruthy();
- expect(vm.$el.querySelector('.title').textContent).toEqual(projectName);
- });
-
- it('does not render if isMini is false', () => {
- const vm = createComponent({
- isMini: false,
- projectName,
- });
-
- expect(vm.$el.innerHTML).toBeFalsy();
- });
-});
diff --git a/spec/javascripts/repo/components/repo_file_spec.js b/spec/javascripts/repo/components/repo_file_spec.js
index 620b604f404..107f6797f8a 100644
--- a/spec/javascripts/repo/components/repo_file_spec.js
+++ b/spec/javascripts/repo/components/repo_file_spec.js
@@ -1,21 +1,11 @@
import Vue from 'vue';
import repoFile from '~/repo/components/repo_file.vue';
import RepoStore from '~/repo/stores/repo_store';
+import eventHub from '~/repo/event_hub';
+import { file } from '../mock_data';
describe('RepoFile', () => {
const updated = 'updated';
- const file = {
- icon: 'icon',
- url: 'url',
- name: 'name',
- lastCommitMessage: 'message',
- lastCommitUpdate: Date.now(),
- level: 10,
- };
- const activeFile = {
- pageTitle: 'pageTitle',
- url: 'url',
- };
const otherFile = {
html: '<p class="file-content">html</p>',
pageTitle: 'otherpageTitle',
@@ -29,12 +19,15 @@ describe('RepoFile', () => {
}).$mount();
}
+ beforeEach(() => {
+ RepoStore.openedFiles = [];
+ });
+
it('renders link, icon, name and last commit details', () => {
const RepoFile = Vue.extend(repoFile);
const vm = new RepoFile({
propsData: {
- file,
- activeFile,
+ file: file(),
},
});
spyOn(vm, 'timeFormated').and.returnValue(updated);
@@ -43,28 +36,20 @@ describe('RepoFile', () => {
const name = vm.$el.querySelector('.repo-file-name');
const fileIcon = vm.$el.querySelector('.file-icon');
- expect(vm.$el.classList.contains('active')).toBeTruthy();
- expect(vm.$el.querySelector(`.${file.icon}`).style.marginLeft).toEqual('100px');
- expect(name.title).toEqual(file.url);
- expect(name.href).toMatch(`/${file.url}`);
- expect(name.textContent.trim()).toEqual(file.name);
- expect(vm.$el.querySelector('.commit-message').textContent.trim()).toBe(file.lastCommitMessage);
+ expect(vm.$el.querySelector(`.${vm.file.icon}`).style.marginLeft).toEqual('0px');
+ expect(name.href).toMatch(`/${vm.file.url}`);
+ expect(name.textContent.trim()).toEqual(vm.file.name);
+ expect(vm.$el.querySelector('.commit-message').textContent.trim()).toBe(vm.file.lastCommit.message);
expect(vm.$el.querySelector('.commit-update').textContent.trim()).toBe(updated);
- expect(fileIcon.classList.contains(file.icon)).toBeTruthy();
- expect(fileIcon.style.marginLeft).toEqual(`${file.level * 10}px`);
+ expect(fileIcon.classList.contains(vm.file.icon)).toBeTruthy();
+ expect(fileIcon.style.marginLeft).toEqual(`${vm.file.level * 10}px`);
});
it('does render if hasFiles is true and is loading tree', () => {
const vm = createComponent({
- file,
- activeFile,
- loading: {
- tree: true,
- },
- hasFiles: true,
+ file: file(),
});
- expect(vm.$el.innerHTML).toBeTruthy();
expect(vm.$el.querySelector('.fa-spin.fa-spinner')).toBeFalsy();
});
@@ -75,75 +60,77 @@ describe('RepoFile', () => {
});
it('renders a spinner if the file is loading', () => {
- file.loading = true;
+ const f = file();
+ f.loading = true;
const vm = createComponent({
- file,
- activeFile,
- loading: {
- tree: true,
- },
- hasFiles: true,
+ file: f,
});
- expect(vm.$el.innerHTML).toBeTruthy();
- expect(vm.$el.querySelector('.fa-spin.fa-spinner').style.marginLeft).toEqual(`${file.level * 10}px`);
- });
-
- it('does not render if loading tree', () => {
- const vm = createComponent({
- file,
- activeFile,
- loading: {
- tree: true,
- },
- });
-
- expect(vm.$el.innerHTML).toBeFalsy();
+ expect(vm.$el.querySelector('.fa-spin.fa-spinner')).not.toBeNull();
+ expect(vm.$el.querySelector('.fa-spin.fa-spinner').style.marginLeft).toEqual(`${vm.file.level * 16}px`);
});
it('does not render commit message and datetime if mini', () => {
+ RepoStore.openedFiles.push(file());
+
const vm = createComponent({
- file,
- activeFile,
- isMini: true,
+ file: file(),
});
expect(vm.$el.querySelector('.commit-message')).toBeFalsy();
expect(vm.$el.querySelector('.commit-update')).toBeFalsy();
});
- it('does not set active class if file is active file', () => {
+ it('fires linkClicked when the link is clicked', () => {
const vm = createComponent({
- file,
- activeFile: {},
+ file: file(),
});
- expect(vm.$el.classList.contains('active')).toBeFalsy();
+ spyOn(vm, 'linkClicked');
+
+ vm.$el.click();
+
+ expect(vm.linkClicked).toHaveBeenCalledWith(vm.file);
});
- it('fires linkClicked when the link is clicked', () => {
- const vm = createComponent({
- file,
- activeFile,
+ describe('submodule', () => {
+ let f;
+ let vm;
+
+ beforeEach(() => {
+ f = file('submodule name', '123456789');
+ f.type = 'submodule';
+
+ vm = createComponent({
+ file: f,
+ });
});
- spyOn(vm, 'linkClicked');
+ afterEach(() => {
+ vm.$destroy();
+ });
- vm.$el.querySelector('.repo-file-name').click();
+ it('renders submodule short ID', () => {
+ expect(vm.$el.querySelector('.commit-sha').textContent.trim()).toBe('12345678');
+ });
- expect(vm.linkClicked).toHaveBeenCalledWith(file);
+ it('renders ID next to submodule name', () => {
+ expect(vm.$el.querySelector('td').textContent.replace(/\s+/g, ' ')).toContain('submodule name @ 12345678');
+ });
});
describe('methods', () => {
describe('linkClicked', () => {
- const vm = jasmine.createSpyObj('vm', ['$emit']);
+ it('$emits fileNameClicked with file obj', () => {
+ spyOn(eventHub, '$emit');
- it('$emits linkclicked with file obj', () => {
- const theFile = {};
+ const vm = createComponent({
+ file: file(),
+ });
- repoFile.methods.linkClicked.call(vm, theFile);
+ vm.linkClicked(vm.file);
- expect(vm.$emit).toHaveBeenCalledWith('linkclicked', theFile);
+ expect(eventHub.$emit).toHaveBeenCalledWith('fileNameClicked', vm.file);
});
});
});
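The repo specs in this diff replace hand-rolled fixture objects with a file() factory imported from '../mock_data'. That factory is not shown here; a plausible sketch, with field names inferred from how the specs read them:

    // file(name, id) builds a fresh fixture per test so specs can mutate it
    // freely (loading, opened, type, files, ...). All values are assumptions.
    export const file = (name = 'name', id = '123') => ({
      id,
      name,
      icon: 'icon',
      url: `url-${id}`,
      type: 'blob',
      level: 0,
      loading: false,
      opened: false,
      files: [],
      lastCommit: {
        message: 'commit message',
      },
    });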
diff --git a/spec/javascripts/repo/components/repo_loading_file_spec.js b/spec/javascripts/repo/components/repo_loading_file_spec.js
index a030314d749..e9f95a02028 100644
--- a/spec/javascripts/repo/components/repo_loading_file_spec.js
+++ b/spec/javascripts/repo/components/repo_loading_file_spec.js
@@ -1,4 +1,5 @@
import Vue from 'vue';
+import RepoStore from '~/repo/stores/repo_store';
import repoLoadingFile from '~/repo/components/repo_loading_file.vue';
describe('RepoLoadingFile', () => {
@@ -28,6 +29,10 @@ describe('RepoLoadingFile', () => {
});
}
+ afterEach(() => {
+ RepoStore.openedFiles = [];
+ });
+
it('renders 3 columns of animated LoC', () => {
const vm = createComponent({
loading: {
@@ -42,38 +47,16 @@ describe('RepoLoadingFile', () => {
});
it('renders 1 column of animated LoC if isMini', () => {
+ RepoStore.openedFiles = new Array(1);
const vm = createComponent({
loading: {
tree: true,
},
hasFiles: false,
- isMini: true,
});
const columns = [...vm.$el.querySelectorAll('td')];
expect(columns.length).toEqual(1);
assertColumns(columns);
});
-
- it('does not render if tree is not loading', () => {
- const vm = createComponent({
- loading: {
- tree: false,
- },
- hasFiles: false,
- });
-
- expect(vm.$el.innerHTML).toBeFalsy();
- });
-
- it('does not render if hasFiles is true', () => {
- const vm = createComponent({
- loading: {
- tree: true,
- },
- hasFiles: true,
- });
-
- expect(vm.$el.innerHTML).toBeFalsy();
- });
});
diff --git a/spec/javascripts/repo/components/repo_prev_directory_spec.js b/spec/javascripts/repo/components/repo_prev_directory_spec.js
index 34dde545e6a..4c064f21084 100644
--- a/spec/javascripts/repo/components/repo_prev_directory_spec.js
+++ b/spec/javascripts/repo/components/repo_prev_directory_spec.js
@@ -1,5 +1,6 @@
import Vue from 'vue';
import repoPrevDirectory from '~/repo/components/repo_prev_directory.vue';
+import eventHub from '~/repo/event_hub';
describe('RepoPrevDirectory', () => {
function createComponent(propsData) {
@@ -20,7 +21,7 @@ describe('RepoPrevDirectory', () => {
spyOn(vm, 'linkClicked');
expect(link.href).toMatch(`/${prevUrl}`);
- expect(link.textContent).toEqual('..');
+ expect(link.textContent).toEqual('...');
link.click();
@@ -29,14 +30,17 @@ describe('RepoPrevDirectory', () => {
describe('methods', () => {
describe('linkClicked', () => {
- const vm = jasmine.createSpyObj('vm', ['$emit']);
+      it('$emits goToPreviousDirectoryClicked with prevUrl', () => {
+ const prevUrl = 'prevUrl';
+ const vm = createComponent({
+ prevUrl,
+ });
- it('$emits linkclicked with file obj', () => {
- const file = {};
+ spyOn(eventHub, '$emit');
- repoPrevDirectory.methods.linkClicked.call(vm, file);
+ vm.linkClicked(prevUrl);
- expect(vm.$emit).toHaveBeenCalledWith('linkclicked', file);
+ expect(eventHub.$emit).toHaveBeenCalledWith('goToPreviousDirectoryClicked', prevUrl);
});
});
});
diff --git a/spec/javascripts/repo/components/repo_sidebar_spec.js b/spec/javascripts/repo/components/repo_sidebar_spec.js
index 35d2b37ac2a..148f275e03d 100644
--- a/spec/javascripts/repo/components/repo_sidebar_spec.js
+++ b/spec/javascripts/repo/components/repo_sidebar_spec.js
@@ -3,6 +3,7 @@ import Helper from '~/repo/helpers/repo_helper';
import RepoService from '~/repo/services/repo_service';
import RepoStore from '~/repo/stores/repo_store';
import repoSidebar from '~/repo/components/repo_sidebar.vue';
+import { file } from '../mock_data';
describe('RepoSidebar', () => {
let vm;
@@ -15,14 +16,15 @@ describe('RepoSidebar', () => {
afterEach(() => {
vm.$destroy();
+
+ RepoStore.files = [];
+ RepoStore.openedFiles = [];
});
it('renders a sidebar', () => {
- RepoStore.files = [{
- id: 0,
- }];
+ RepoStore.files = [file()];
RepoStore.openedFiles = [];
- RepoStore.isRoot = false;
+ RepoStore.isRoot = true;
vm = createComponent();
const thead = vm.$el.querySelector('thead');
@@ -30,9 +32,9 @@ describe('RepoSidebar', () => {
expect(vm.$el.id).toEqual('sidebar');
expect(vm.$el.classList.contains('sidebar-mini')).toBeFalsy();
- expect(thead.querySelector('.name').textContent).toEqual('Name');
- expect(thead.querySelector('.last-commit').textContent).toEqual('Last commit');
- expect(thead.querySelector('.last-update').textContent).toEqual('Last update');
+ expect(thead.querySelector('.name').textContent.trim()).toEqual('Name');
+ expect(thead.querySelector('.last-commit').textContent.trim()).toEqual('Last commit');
+ expect(thead.querySelector('.last-update').textContent.trim()).toEqual('Last update');
expect(tbody.querySelector('.repo-file-options')).toBeFalsy();
expect(tbody.querySelector('.prev-directory')).toBeFalsy();
expect(tbody.querySelector('.loading-file')).toBeFalsy();
@@ -46,76 +48,89 @@ describe('RepoSidebar', () => {
vm = createComponent();
expect(vm.$el.classList.contains('sidebar-mini')).toBeTruthy();
- expect(vm.$el.querySelector('thead')).toBeFalsy();
- expect(vm.$el.querySelector('tbody .repo-file-options')).toBeTruthy();
+ expect(vm.$el.querySelector('thead')).toBeTruthy();
+ expect(vm.$el.querySelector('thead .repo-file-options')).toBeTruthy();
});
it('renders 5 loading files if tree is loading and not hasFiles', () => {
- RepoStore.loading = {
- tree: true,
- };
+ RepoStore.loading.tree = true;
RepoStore.files = [];
vm = createComponent();
expect(vm.$el.querySelectorAll('tbody .loading-file').length).toEqual(5);
});
- it('renders a prev directory if isRoot', () => {
- RepoStore.files = [{
- id: 0,
- }];
- RepoStore.isRoot = true;
+ it('renders a prev directory if is not root', () => {
+ RepoStore.files = [file()];
+ RepoStore.isRoot = false;
+ RepoStore.loading.tree = false;
vm = createComponent();
expect(vm.$el.querySelector('tbody .prev-directory')).toBeTruthy();
});
+ describe('flattendFiles', () => {
+ it('returns a flattend array of files', () => {
+ const f = file();
+ f.files.push(file('testing 123'));
+ const files = [f, file()];
+ vm = createComponent();
+ vm.files = files;
+
+ expect(vm.flattendFiles.length).toBe(3);
+ expect(vm.flattendFiles[1].name).toBe('testing 123');
+ });
+ });
+
describe('methods', () => {
describe('fileClicked', () => {
it('should fetch data for new file', () => {
spyOn(Helper, 'getContent').and.callThrough();
- const file1 = {
- id: 0,
- url: '',
- };
- RepoStore.files = [file1];
+ RepoStore.files = [file()];
RepoStore.isRoot = true;
vm = createComponent();
- vm.fileClicked(file1);
+ vm.fileClicked(RepoStore.files[0]);
- expect(Helper.getContent).toHaveBeenCalledWith(file1);
+ expect(Helper.getContent).toHaveBeenCalledWith(RepoStore.files[0]);
});
it('should not fetch data for already opened files', () => {
- const file = {
- id: 42,
- url: 'foo',
- };
-
- spyOn(Helper, 'getFileFromPath').and.returnValue(file);
+ const f = file();
+ spyOn(Helper, 'getFileFromPath').and.returnValue(f);
spyOn(RepoStore, 'setActiveFiles');
vm = createComponent();
- vm.fileClicked(file);
+ vm.fileClicked(f);
- expect(RepoStore.setActiveFiles).toHaveBeenCalledWith(file);
+ expect(RepoStore.setActiveFiles).toHaveBeenCalledWith(f);
});
it('should hide files in directory if already open', () => {
- spyOn(RepoStore, 'removeChildFilesOfTree').and.callThrough();
- const file1 = {
- id: 0,
- type: 'tree',
- url: '',
- opened: true,
- };
- RepoStore.files = [file1];
- RepoStore.isRoot = true;
+ spyOn(Helper, 'setDirectoryToClosed').and.callThrough();
+ const f = file();
+ f.opened = true;
+ f.type = 'tree';
+ RepoStore.files = [f];
vm = createComponent();
- vm.fileClicked(file1);
+ vm.fileClicked(RepoStore.files[0]);
+
+ expect(Helper.setDirectoryToClosed).toHaveBeenCalledWith(RepoStore.files[0]);
+ });
+
+ describe('submodule', () => {
+ it('opens submodule project URL', () => {
+ spyOn(gl.utils, 'visitUrl');
- expect(RepoStore.removeChildFilesOfTree).toHaveBeenCalledWith(file1);
+ const f = file();
+ f.type = 'submodule';
+
+ vm = createComponent();
+
+ vm.fileClicked(f);
+
+ expect(gl.utils.visitUrl).toHaveBeenCalledWith('url');
+ });
});
});
@@ -131,36 +146,31 @@ describe('RepoSidebar', () => {
});
describe('back button', () => {
- const file1 = {
- id: 1,
- url: 'file1',
- };
- const file2 = {
- id: 2,
- url: 'file2',
- };
- RepoStore.files = [file1, file2];
- RepoStore.openedFiles = [file1, file2];
- RepoStore.isRoot = true;
+ beforeEach(() => {
+ const f = file();
+ const file2 = Object.assign({}, file());
+ file2.url = 'test';
+ RepoStore.files = [f, file2];
+ RepoStore.openedFiles = [];
+ RepoStore.isRoot = true;
- vm = createComponent();
- vm.fileClicked(file1);
+ vm = createComponent();
+ });
it('render previous file when using back button', () => {
spyOn(Helper, 'getContent').and.callThrough();
- vm.fileClicked(file2);
- expect(Helper.getContent).toHaveBeenCalledWith(file2);
- Helper.getContent.calls.reset();
+ vm.fileClicked(RepoStore.files[1]);
+ expect(Helper.getContent).toHaveBeenCalledWith(RepoStore.files[1]);
history.pushState({
key: Math.random(),
- }, '', file1.url);
+ }, '', RepoStore.files[1].url);
const popEvent = document.createEvent('Event');
popEvent.initEvent('popstate', true, true);
window.dispatchEvent(popEvent);
- expect(Helper.getContent.calls.mostRecent().args[0].url).toContain(file1.url);
+ expect(Helper.getContent.calls.mostRecent().args[0].url).toContain(RepoStore.files[1].url);
window.history.pushState({}, null, '/');
});
diff --git a/spec/javascripts/repo/components/repo_tab_spec.js b/spec/javascripts/repo/components/repo_tab_spec.js
index d2a790ad73a..37e297437f0 100644
--- a/spec/javascripts/repo/components/repo_tab_spec.js
+++ b/spec/javascripts/repo/components/repo_tab_spec.js
@@ -1,5 +1,6 @@
import Vue from 'vue';
import repoTab from '~/repo/components/repo_tab.vue';
+import RepoStore from '~/repo/stores/repo_store';
describe('RepoTab', () => {
function createComponent(propsData) {
@@ -18,7 +19,7 @@ describe('RepoTab', () => {
const vm = createComponent({
tab,
});
- const close = vm.$el.querySelector('.close');
+ const close = vm.$el.querySelector('.close-btn');
const name = vm.$el.querySelector(`a[title="${tab.url}"]`);
spyOn(vm, 'closeTab');
@@ -44,26 +45,43 @@ describe('RepoTab', () => {
tab,
});
- expect(vm.$el.querySelector('.close .fa-circle')).toBeTruthy();
+ expect(vm.$el.querySelector('.close-btn .fa-circle')).toBeTruthy();
});
describe('methods', () => {
describe('closeTab', () => {
- const vm = jasmine.createSpyObj('vm', ['$emit']);
-
it('returns undefined and does not $emit if file is changed', () => {
- const file = { changed: true };
- const returnVal = repoTab.methods.closeTab.call(vm, file);
+ const tab = {
+ url: 'url',
+ name: 'name',
+ changed: true,
+ };
+ const vm = createComponent({
+ tab,
+ });
+
+ spyOn(RepoStore, 'removeFromOpenedFiles');
+
+ vm.$el.querySelector('.close-btn').click();
- expect(returnVal).toBeUndefined();
- expect(vm.$emit).not.toHaveBeenCalled();
+ expect(RepoStore.removeFromOpenedFiles).not.toHaveBeenCalled();
});
it('$emits tabclosed event with file obj', () => {
- const file = { changed: false };
- repoTab.methods.closeTab.call(vm, file);
+ const tab = {
+ url: 'url',
+ name: 'name',
+ changed: false,
+ };
+ const vm = createComponent({
+ tab,
+ });
+
+ spyOn(RepoStore, 'removeFromOpenedFiles');
+
+ vm.$el.querySelector('.close-btn').click();
- expect(vm.$emit).toHaveBeenCalledWith('tabclosed', file);
+ expect(RepoStore.removeFromOpenedFiles).toHaveBeenCalledWith(tab);
});
});
});
diff --git a/spec/javascripts/repo/components/repo_tabs_spec.js b/spec/javascripts/repo/components/repo_tabs_spec.js
index a02b54efafc..431129bc866 100644
--- a/spec/javascripts/repo/components/repo_tabs_spec.js
+++ b/spec/javascripts/repo/components/repo_tabs_spec.js
@@ -16,6 +16,10 @@ describe('RepoTabs', () => {
return new RepoTabs().$mount();
}
+ afterEach(() => {
+ RepoStore.openedFiles = [];
+ });
+
it('renders a list of tabs', () => {
RepoStore.openedFiles = openedFiles;
@@ -28,18 +32,4 @@ describe('RepoTabs', () => {
expect(tabs[1].classList.contains('active')).toBeFalsy();
expect(tabs[2].classList.contains('tabs-divider')).toBeTruthy();
});
-
- describe('methods', () => {
- describe('tabClosed', () => {
- it('calls removeFromOpenedFiles with file obj', () => {
- const file = {};
-
- spyOn(RepoStore, 'removeFromOpenedFiles');
-
- repoTabs.methods.tabClosed(file);
-
- expect(RepoStore.removeFromOpenedFiles).toHaveBeenCalledWith(file);
- });
- });
- });
});
diff --git a/spec/javascripts/repo/mock_data.js b/spec/javascripts/repo/mock_data.js
new file mode 100644
index 00000000000..71e275caf09
--- /dev/null
+++ b/spec/javascripts/repo/mock_data.js
@@ -0,0 +1,14 @@
+import RepoHelper from '~/repo/helpers/repo_helper';
+
+// eslint-disable-next-line import/prefer-default-export
+export const file = (name = 'name', id = name) => RepoHelper.serializeRepoEntity('blob', {
+ id,
+ icon: 'icon',
+ url: 'url',
+ name,
+ last_commit: {
+ id: '123',
+ message: 'test',
+ committed_date: new Date().toISOString(),
+ },
+});
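
The new mock_data.js factory above replaces the ad-hoc object literals used in the specs earlier in this diff. A minimal usage sketch, assuming RepoHelper.serializeRepoEntity keeps the name and id it is given; the describe/it names and assertions below are illustrative, not part of the diff:

import RepoStore from '~/repo/stores/repo_store';
import { file } from '../mock_data';

describe('file() factory usage', () => {
  afterEach(() => {
    // Reset shared store state so it does not leak between examples.
    RepoStore.files = [];
  });

  it('builds independent serialized blob entries', () => {
    const readme = file('README.md');         // name and id both default to 'README.md'
    const license = file('LICENSE', 'lic-1'); // explicit second argument overrides the id

    RepoStore.files = [readme, license];

    expect(RepoStore.files.length).toBe(2);
    expect(readme.name).toBe('README.md');
  });
});
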
diff --git a/spec/javascripts/shortcuts_issuable_spec.js b/spec/javascripts/shortcuts_issuable_spec.js
index a912e150e9b..f6320db8dc4 100644
--- a/spec/javascripts/shortcuts_issuable_spec.js
+++ b/spec/javascripts/shortcuts_issuable_spec.js
@@ -1,7 +1,5 @@
-/* global ShortcutsIssuable */
-
import '~/copy_as_gfm';
-import '~/shortcuts_issuable';
+import ShortcutsIssuable from '~/shortcuts_issuable';
describe('ShortcutsIssuable', () => {
const fixtureName = 'merge_requests/diff_comment.html.raw';
diff --git a/spec/javascripts/shortcuts_spec.js b/spec/javascripts/shortcuts_spec.js
index 53e4c68beb3..a2a609edef9 100644
--- a/spec/javascripts/shortcuts_spec.js
+++ b/spec/javascripts/shortcuts_spec.js
@@ -1,4 +1,5 @@
-/* global Shortcuts */
+import Shortcuts from '~/shortcuts';
+
describe('Shortcuts', () => {
const fixtureName = 'merge_requests/diff_comment.html.raw';
const createEvent = (type, target) => $.Event(type, {
@@ -8,19 +9,17 @@ describe('Shortcuts', () => {
preloadFixtures(fixtureName);
describe('toggleMarkdownPreview', () => {
- let sc;
-
beforeEach(() => {
loadFixtures(fixtureName);
spyOnEvent('.js-new-note-form .js-md-preview-button', 'focus');
spyOnEvent('.edit-note .js-md-preview-button', 'focus');
- sc = new Shortcuts();
+ new Shortcuts(); // eslint-disable-line no-new
});
it('focuses preview button in form', () => {
- sc.toggleMarkdownPreview(
+ Shortcuts.toggleMarkdownPreview(
createEvent('KeyboardEvent', document.querySelector('.js-new-note-form .js-note-text'),
));
@@ -31,7 +30,7 @@ describe('Shortcuts', () => {
document.querySelector('.js-note-edit').click();
setTimeout(() => {
- sc.toggleMarkdownPreview(
+ Shortcuts.toggleMarkdownPreview(
createEvent('KeyboardEvent', document.querySelector('.edit-note .js-note-text'),
));
diff --git a/spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js b/spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js
new file mode 100644
index 00000000000..aa93134f2dd
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/user_avatar/user_avatar_image_spec.js
@@ -0,0 +1,84 @@
+import Vue from 'vue';
+import { placeholderImage } from '~/lazy_loader';
+import userAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
+import mountComponent from '../../../helpers/vue_mount_component_helper';
+
+const DEFAULT_PROPS = {
+ size: 99,
+ imgSrc: 'myavatarurl.com',
+ imgAlt: 'mydisplayname',
+ cssClasses: 'myextraavatarclass',
+ tooltipText: 'tooltip text',
+ tooltipPlacement: 'bottom',
+};
+
+describe('User Avatar Image Component', function () {
+ let vm;
+ let UserAvatarImage;
+
+ beforeEach(() => {
+ UserAvatarImage = Vue.extend(userAvatarImage);
+ });
+
+ describe('Initialization', function () {
+ beforeEach(function () {
+ vm = mountComponent(UserAvatarImage, {
+ ...DEFAULT_PROPS,
+ }).$mount();
+ });
+
+ it('should return a defined Vue component', function () {
+ expect(vm).toBeDefined();
+ });
+
+ it('should have <img> as a child element', function () {
+ expect(vm.$el.tagName).toBe('IMG');
+ expect(vm.$el.getAttribute('src')).toBe(DEFAULT_PROPS.imgSrc);
+ expect(vm.$el.getAttribute('data-src')).toBe(DEFAULT_PROPS.imgSrc);
+ expect(vm.$el.getAttribute('alt')).toBe(DEFAULT_PROPS.imgAlt);
+ });
+
+ it('should properly compute tooltipContainer', function () {
+ expect(vm.tooltipContainer).toBe('body');
+ });
+
+ it('should properly render tooltipContainer', function () {
+ expect(vm.$el.getAttribute('data-container')).toBe('body');
+ });
+
+ it('should properly compute avatarSizeClass', function () {
+ expect(vm.avatarSizeClass).toBe('s99');
+ });
+
+ it('should properly render img css', function () {
+ const classList = vm.$el.classList;
+ const containsAvatar = classList.contains('avatar');
+ const containsSizeClass = classList.contains('s99');
+ const containsCustomClass = classList.contains(DEFAULT_PROPS.cssClasses);
+ const lazyClass = classList.contains('lazy');
+
+ expect(containsAvatar).toBe(true);
+ expect(containsSizeClass).toBe(true);
+ expect(containsCustomClass).toBe(true);
+ expect(lazyClass).toBe(false);
+ });
+ });
+
+ describe('Initialization when lazy', function () {
+ beforeEach(function () {
+ vm = mountComponent(UserAvatarImage, {
+ ...DEFAULT_PROPS,
+ lazy: true,
+ }).$mount();
+ });
+
+ it('should add lazy attributes', function () {
+ const classList = vm.$el.classList;
+ const lazyClass = classList.contains('lazy');
+
+ expect(lazyClass).toBe(true);
+ expect(vm.$el.getAttribute('src')).toBe(placeholderImage);
+ expect(vm.$el.getAttribute('data-src')).toBe(DEFAULT_PROPS.imgSrc);
+ });
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/user_avatar_link_spec.js b/spec/javascripts/vue_shared/components/user_avatar/user_avatar_link_spec.js
index 52e450e9ba5..52e450e9ba5 100644
--- a/spec/javascripts/vue_shared/components/user_avatar_link_spec.js
+++ b/spec/javascripts/vue_shared/components/user_avatar/user_avatar_link_spec.js
diff --git a/spec/javascripts/vue_shared/components/user_avatar_svg_spec.js b/spec/javascripts/vue_shared/components/user_avatar/user_avatar_svg_spec.js
index b8d639ffbec..b8d639ffbec 100644
--- a/spec/javascripts/vue_shared/components/user_avatar_svg_spec.js
+++ b/spec/javascripts/vue_shared/components/user_avatar/user_avatar_svg_spec.js
diff --git a/spec/javascripts/vue_shared/components/user_avatar_image_spec.js b/spec/javascripts/vue_shared/components/user_avatar_image_spec.js
deleted file mode 100644
index 8daa7610274..00000000000
--- a/spec/javascripts/vue_shared/components/user_avatar_image_spec.js
+++ /dev/null
@@ -1,54 +0,0 @@
-import Vue from 'vue';
-import UserAvatarImage from '~/vue_shared/components/user_avatar/user_avatar_image.vue';
-
-const UserAvatarImageComponent = Vue.extend(UserAvatarImage);
-
-describe('User Avatar Image Component', function () {
- describe('Initialization', function () {
- beforeEach(function () {
- this.propsData = {
- size: 99,
- imgSrc: 'myavatarurl.com',
- imgAlt: 'mydisplayname',
- cssClasses: 'myextraavatarclass',
- tooltipText: 'tooltip text',
- tooltipPlacement: 'bottom',
- };
-
- this.userAvatarImage = new UserAvatarImageComponent({
- propsData: this.propsData,
- }).$mount();
- });
-
- it('should return a defined Vue component', function () {
- expect(this.userAvatarImage).toBeDefined();
- });
-
- it('should have <img> as a child element', function () {
- expect(this.userAvatarImage.$el.tagName).toBe('IMG');
- });
-
- it('should properly compute tooltipContainer', function () {
- expect(this.userAvatarImage.tooltipContainer).toBe('body');
- });
-
- it('should properly render tooltipContainer', function () {
- expect(this.userAvatarImage.$el.getAttribute('data-container')).toBe('body');
- });
-
- it('should properly compute avatarSizeClass', function () {
- expect(this.userAvatarImage.avatarSizeClass).toBe('s99');
- });
-
- it('should properly render img css', function () {
- const classList = this.userAvatarImage.$el.classList;
- const containsAvatar = classList.contains('avatar');
- const containsSizeClass = classList.contains('s99');
- const containsCustomClass = classList.contains('myextraavatarclass');
-
- expect(containsAvatar).toBe(true);
- expect(containsSizeClass).toBe(true);
- expect(containsCustomClass).toBe(true);
- });
- });
-});
diff --git a/spec/javascripts/zen_mode_spec.js b/spec/javascripts/zen_mode_spec.js
index bd18f79cea7..7047053d131 100644
--- a/spec/javascripts/zen_mode_spec.js
+++ b/spec/javascripts/zen_mode_spec.js
@@ -1,7 +1,6 @@
/* eslint-disable space-before-function-paren, no-var, one-var, one-var-declaration-per-line, object-shorthand, comma-dangle, no-return-assign, new-cap, max-len */
-/* global Dropzone */
/* global Mousetrap */
-
+import Dropzone from 'dropzone';
import ZenMode from '~/zen_mode';
(function() {
diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb
index 5f41e28fece..17a620ef603 100644
--- a/spec/lib/banzai/filter/sanitization_filter_spec.rb
+++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb
@@ -217,6 +217,11 @@ describe Banzai::Filter::SanitizationFilter do
output: '<img>'
},
+ 'protocol-based JS injection: Unicode' => {
+ input: %Q(<a href="\u0001java\u0003script:alert('XSS')">foo</a>),
+ output: '<a>foo</a>'
+ },
+
'protocol-based JS injection: spaces and entities' => {
input: '<a href=" &#14; javascript:alert(\'XSS\');">foo</a>',
output: '<a href="">foo</a>'
diff --git a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
index d2e7243ee05..4d3fdbd9554 100644
--- a/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
+++ b/spec/lib/gitlab/background_migration/deserialize_merge_request_diffs_and_commits_spec.rb
@@ -31,8 +31,8 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
end
it 'creates correct entries in the merge_request_diff_commits table' do
- expect(updated_merge_request_diff.merge_request_diff_commits.count).to eq(commits.count)
- expect(updated_merge_request_diff.commits.map(&:to_hash)).to eq(commits)
+ expect(updated_merge_request_diff.merge_request_diff_commits.count).to eq(expected_commits.count)
+ expect(updated_merge_request_diff.commits.map(&:to_hash)).to eq(expected_commits)
end
it 'creates correct entries in the merge_request_diff_files table' do
@@ -199,6 +199,16 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diff has valid commits and diffs' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
+ let(:diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
+ let(:expected_diffs) { diffs }
+
+ include_examples 'updated MR diff'
+ end
+
+ context 'when the merge request diff has diffs but no commits' do
+ let(:commits) { nil }
+ let(:expected_commits) { [] }
let(:diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
let(:expected_diffs) { diffs }
@@ -207,6 +217,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs do not have too_large set' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
let(:expected_diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
let(:diffs) do
@@ -218,6 +229,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs do not have a_mode and b_mode set' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
let(:expected_diffs) { diffs_to_hashes(merge_request_diff.merge_request_diff_files) }
let(:diffs) do
@@ -229,6 +241,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs have binary content' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
let(:expected_diffs) { diffs }
# The start of a PDF created by Illustrator
@@ -257,6 +270,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diff has commits, but no diffs' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
let(:diffs) { [] }
let(:expected_diffs) { diffs }
@@ -265,6 +279,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs have invalid content' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
+ let(:expected_commits) { commits }
let(:diffs) { ['--broken-diff'] }
let(:expected_diffs) { [] }
@@ -274,6 +289,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs are Rugged::Patch instances' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
let(:first_commit) { merge_request.project.repository.commit(merge_request_diff.head_commit_sha) }
+ let(:expected_commits) { commits }
let(:diffs) { first_commit.rugged_diff_from_parent.patches }
let(:expected_diffs) { [] }
@@ -283,6 +299,7 @@ describe Gitlab::BackgroundMigration::DeserializeMergeRequestDiffsAndCommits, :t
context 'when the merge request diffs are Rugged::Diff::Delta instances' do
let(:commits) { merge_request_diff.commits.map(&:to_hash) }
let(:first_commit) { merge_request.project.repository.commit(merge_request_diff.head_commit_sha) }
+ let(:expected_commits) { commits }
let(:diffs) { first_commit.rugged_diff_from_parent.deltas }
let(:expected_diffs) { [] }
diff --git a/spec/lib/gitlab/backup/manager_spec.rb b/spec/lib/gitlab/backup/manager_spec.rb
index 422f2af7266..b68301a066a 100644
--- a/spec/lib/gitlab/backup/manager_spec.rb
+++ b/spec/lib/gitlab/backup/manager_spec.rb
@@ -172,10 +172,6 @@ describe Backup::Manager do
end
describe '#unpack' do
- before do
- allow(Dir).to receive(:chdir)
- end
-
context 'when there are no backup files in the directory' do
before do
allow(Dir).to receive(:glob).and_return([])
diff --git a/spec/lib/gitlab/conflict/file_collection_spec.rb b/spec/lib/gitlab/conflict/file_collection_spec.rb
index a4d7628b03a..5944ce8049a 100644
--- a/spec/lib/gitlab/conflict/file_collection_spec.rb
+++ b/spec/lib/gitlab/conflict/file_collection_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Gitlab::Conflict::FileCollection do
let(:merge_request) { create(:merge_request, source_branch: 'conflict-resolvable', target_branch: 'conflict-start') }
- let(:file_collection) { described_class.read_only(merge_request) }
+ let(:file_collection) { described_class.new(merge_request) }
describe '#files' do
it 'returns an array of Conflict::Files' do
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index 5356e9742b4..bf981d2f6f6 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -8,9 +8,10 @@ describe Gitlab::Conflict::File do
let(:our_commit) { rugged.branches['conflict-resolvable'].target }
let(:merge_request) { create(:merge_request, source_branch: 'conflict-resolvable', target_branch: 'conflict-start', source_project: project) }
let(:index) { rugged.merge_commits(our_commit, their_commit) }
- let(:conflict) { index.conflicts.last }
- let(:merge_file_result) { index.merge_file('files/ruby/regex.rb') }
- let(:conflict_file) { described_class.new(merge_file_result, conflict, merge_request: merge_request) }
+ let(:rugged_conflict) { index.conflicts.last }
+ let(:raw_conflict_content) { index.merge_file('files/ruby/regex.rb')[:data] }
+ let(:raw_conflict_file) { Gitlab::Git::Conflict::File.new(repository, our_commit.oid, rugged_conflict, raw_conflict_content) }
+ let(:conflict_file) { described_class.new(raw_conflict_file, merge_request: merge_request) }
describe '#resolve_lines' do
let(:section_keys) { conflict_file.sections.map { |section| section[:id] }.compact }
@@ -48,18 +49,18 @@ describe Gitlab::Conflict::File do
end
end
- it 'raises MissingResolution when passed a hash without resolutions for all sections' do
+ it 'raises ResolutionError when passed a hash without resolutions for all sections' do
empty_hash = section_keys.map { |key| [key, nil] }.to_h
invalid_hash = section_keys.map { |key| [key, 'invalid'] }.to_h
expect { conflict_file.resolve_lines({}) }
- .to raise_error(Gitlab::Conflict::File::MissingResolution)
+ .to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
expect { conflict_file.resolve_lines(empty_hash) }
- .to raise_error(Gitlab::Conflict::File::MissingResolution)
+ .to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
expect { conflict_file.resolve_lines(invalid_hash) }
- .to raise_error(Gitlab::Conflict::File::MissingResolution)
+ .to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
end
end
@@ -144,7 +145,7 @@ describe Gitlab::Conflict::File do
end
context 'with an example file' do
- let(:file) do
+ let(:raw_conflict_content) do
<<FILE
# Ensure there is no match line header here
def username_regexp
@@ -220,7 +221,6 @@ end
FILE
end
- let(:conflict_file) { described_class.new({ data: file }, conflict, merge_request: merge_request) }
let(:sections) { conflict_file.sections }
it 'sets the correct match line headers' do
diff --git a/spec/lib/gitlab/current_settings_spec.rb b/spec/lib/gitlab/current_settings_spec.rb
index d57ffcae8e1..492659a82b0 100644
--- a/spec/lib/gitlab/current_settings_spec.rb
+++ b/spec/lib/gitlab/current_settings_spec.rb
@@ -21,7 +21,7 @@ describe Gitlab::CurrentSettings do
it 'falls back to DB if Redis returns an empty value' do
expect(ApplicationSetting).to receive(:cached).and_return(nil)
- expect(ApplicationSetting).to receive(:last).and_call_original
+ expect(ApplicationSetting).to receive(:last).and_call_original.twice
expect(current_application_settings).to be_a(ApplicationSetting)
end
diff --git a/spec/lib/gitlab/diff/parser_spec.rb b/spec/lib/gitlab/diff/parser_spec.rb
index 8af49ed50ff..80c8c189665 100644
--- a/spec/lib/gitlab/diff/parser_spec.rb
+++ b/spec/lib/gitlab/diff/parser_spec.rb
@@ -143,4 +143,21 @@ eos
it { expect(parser.parse([])).to eq([]) }
it { expect(parser.parse(nil)).to eq([]) }
end
+
+ describe 'tolerates special diff markers in the content' do
+ it "counts lines correctly" do
+ diff = <<~END
+ --- a/test
+ +++ b/test
+ @@ -1,2 +1,2 @@
+ +ipsum
+ +++ b
+ -ipsum
+ END
+
+ lines = parser.parse(diff.lines).to_a
+
+ expect(lines.size).to eq(3)
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/position_spec.rb b/spec/lib/gitlab/diff/position_spec.rb
index 9bf54fdecc4..245f24e96d4 100644
--- a/spec/lib/gitlab/diff/position_spec.rb
+++ b/spec/lib/gitlab/diff/position_spec.rb
@@ -40,7 +40,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, 0)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, 0)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -108,7 +108,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, 15)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, 15)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -149,7 +149,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -189,7 +189,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, 13, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, 13, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -233,7 +233,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, 5)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, 5)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -274,7 +274,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -314,7 +314,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, 4, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, 4, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -357,7 +357,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, 0, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, 0, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -399,7 +399,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, subject.new_line, 0)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, subject.new_line, 0)
expect(subject.line_code(project.repository)).to eq(line_code)
end
@@ -447,7 +447,7 @@ describe Gitlab::Diff::Position do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(subject.file_path, 0, subject.old_line)
+ line_code = Gitlab::Git.diff_line_code(subject.file_path, 0, subject.old_line)
expect(subject.line_code(project.repository)).to eq(line_code)
end
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index 8b14b227e65..9151c66afb3 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -6,6 +6,9 @@ describe Gitlab::EncodingHelper do
describe '#encode!' do
[
+ ["nil", nil, nil],
+ ["empty string", "".encode("ASCII-8BIT"), "".encode("UTF-8")],
+ ["invalid utf-8 encoded string", "my bad string\xE5".force_encoding("UTF-8"), "my bad string"],
[
'leaves ascii only string as is',
'ascii only string',
@@ -81,6 +84,9 @@ describe Gitlab::EncodingHelper do
describe '#encode_utf8' do
[
+ ["nil", nil, nil],
+ ["empty string", "".encode("ASCII-8BIT"), "".encode("UTF-8")],
+ ["invalid utf-8 encoded string", "my bad string\xE5".force_encoding("UTF-8"), "my bad stringå"],
[
"encodes valid utf8 encoded string to utf8",
"λ, λ, λ".encode("UTF-8"),
@@ -95,12 +101,18 @@ describe Gitlab::EncodingHelper do
"encodes valid ISO-8859-1 encoded string to utf8",
"Rüby ist eine Programmiersprache. Wir verlängern den text damit ICU die Sprache erkennen kann.".encode("ISO-8859-1", "UTF-8"),
"Rüby ist eine Programmiersprache. Wir verlängern den text damit ICU die Sprache erkennen kann.".encode("UTF-8")
+ ],
+ [
+ # Test case from https://gitlab.com/gitlab-org/gitlab-ce/issues/39227
+ "Equifax branch name",
+ "refs/heads/Equifax".encode("UTF-8"),
+ "refs/heads/Equifax".encode("UTF-8")
]
].each do |description, test_string, xpect|
it description do
- r = ext_class.encode_utf8(test_string.force_encoding('UTF-8'))
+ r = ext_class.encode_utf8(test_string)
expect(r).to eq(xpect)
- expect(r.encoding.name).to eq('UTF-8')
+ expect(r.encoding.name).to eq('UTF-8') if xpect
end
end
diff --git a/spec/lib/gitlab/conflict/parser_spec.rb b/spec/lib/gitlab/git/conflict/parser_spec.rb
index fce606a2bb5..7b035a381f1 100644
--- a/spec/lib/gitlab/conflict/parser_spec.rb
+++ b/spec/lib/gitlab/git/conflict/parser_spec.rb
@@ -1,11 +1,9 @@
require 'spec_helper'
-describe Gitlab::Conflict::Parser do
- let(:parser) { described_class.new }
-
- describe '#parse' do
+describe Gitlab::Git::Conflict::Parser do
+ describe '.parse' do
def parse_text(text)
- parser.parse(text, our_path: 'README.md', their_path: 'README.md')
+ described_class.parse(text, our_path: 'README.md', their_path: 'README.md')
end
context 'when the file has valid conflicts' do
@@ -87,33 +85,37 @@ CONFLICT
end
let(:lines) do
- parser.parse(text, our_path: 'files/ruby/regex.rb', their_path: 'files/ruby/regex.rb')
+ described_class.parse(text, our_path: 'files/ruby/regex.rb', their_path: 'files/ruby/regex.rb')
+ end
+ let(:old_line_numbers) do
+ lines.select { |line| line[:type] != 'new' }.map { |line| line[:line_old] }
end
+ let(:new_line_numbers) do
+ lines.select { |line| line[:type] != 'old' }.map { |line| line[:line_new] }
+ end
+ let(:line_indexes) { lines.map { |line| line[:line_obj_index] } }
it 'sets our lines as new lines' do
- expect(lines[8..13]).to all(have_attributes(type: 'new'))
- expect(lines[26..27]).to all(have_attributes(type: 'new'))
- expect(lines[56..57]).to all(have_attributes(type: 'new'))
+ expect(lines[8..13]).to all(include(type: 'new'))
+ expect(lines[26..27]).to all(include(type: 'new'))
+ expect(lines[56..57]).to all(include(type: 'new'))
end
it 'sets their lines as old lines' do
- expect(lines[14..19]).to all(have_attributes(type: 'old'))
- expect(lines[28..29]).to all(have_attributes(type: 'old'))
- expect(lines[58..59]).to all(have_attributes(type: 'old'))
+ expect(lines[14..19]).to all(include(type: 'old'))
+ expect(lines[28..29]).to all(include(type: 'old'))
+ expect(lines[58..59]).to all(include(type: 'old'))
end
it 'sets non-conflicted lines as both' do
- expect(lines[0..7]).to all(have_attributes(type: nil))
- expect(lines[20..25]).to all(have_attributes(type: nil))
- expect(lines[30..55]).to all(have_attributes(type: nil))
- expect(lines[60..62]).to all(have_attributes(type: nil))
+ expect(lines[0..7]).to all(include(type: nil))
+ expect(lines[20..25]).to all(include(type: nil))
+ expect(lines[30..55]).to all(include(type: nil))
+ expect(lines[60..62]).to all(include(type: nil))
end
- it 'sets consecutive line numbers for index, old_pos, and new_pos' do
- old_line_numbers = lines.select { |line| line.type != 'new' }.map(&:old_pos)
- new_line_numbers = lines.select { |line| line.type != 'old' }.map(&:new_pos)
-
- expect(lines.map(&:index)).to eq(0.upto(62).to_a)
+ it 'sets consecutive line numbers for line_obj_index, line_old, and line_new' do
+ expect(line_indexes).to eq(0.upto(62).to_a)
expect(old_line_numbers).to eq(1.upto(53).to_a)
expect(new_line_numbers).to eq(1.upto(53).to_a)
end
@@ -123,12 +125,12 @@ CONFLICT
context 'when there is a non-start delimiter first' do
it 'raises UnexpectedDelimiter when there is a middle delimiter first' do
expect { parse_text('=======') }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'raises UnexpectedDelimiter when there is an end delimiter first' do
expect { parse_text('>>>>>>> README.md') }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'does not raise when there is an end delimiter for a different path first' do
@@ -143,12 +145,12 @@ CONFLICT
it 'raises UnexpectedDelimiter when it is followed by an end delimiter' do
expect { parse_text(start_text + '>>>>>>> README.md' + end_text) }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'raises UnexpectedDelimiter when it is followed by another start delimiter' do
expect { parse_text(start_text + start_text + end_text) }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'does not raise when it is followed by a start delimiter for a different path' do
@@ -163,12 +165,12 @@ CONFLICT
it 'raises UnexpectedDelimiter when it is followed by another middle delimiter' do
expect { parse_text(start_text + '=======' + end_text) }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'raises UnexpectedDelimiter when it is followed by a start delimiter' do
expect { parse_text(start_text + start_text + end_text) }
- .to raise_error(Gitlab::Conflict::Parser::UnexpectedDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnexpectedDelimiter)
end
it 'does not raise when it is followed by a start delimiter for another path' do
@@ -181,25 +183,25 @@ CONFLICT
start_text = "<<<<<<< README.md\n=======\n"
expect { parse_text(start_text) }
- .to raise_error(Gitlab::Conflict::Parser::MissingEndDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::MissingEndDelimiter)
expect { parse_text(start_text + '>>>>>>> some-other-path.md') }
- .to raise_error(Gitlab::Conflict::Parser::MissingEndDelimiter)
+ .to raise_error(Gitlab::Git::Conflict::Parser::MissingEndDelimiter)
end
end
context 'other file types' do
it 'raises UnmergeableFile when lines is blank, indicating a binary file' do
expect { parse_text('') }
- .to raise_error(Gitlab::Conflict::Parser::UnmergeableFile)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnmergeableFile)
expect { parse_text(nil) }
- .to raise_error(Gitlab::Conflict::Parser::UnmergeableFile)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnmergeableFile)
end
it 'raises UnmergeableFile when the file is over 200 KB' do
expect { parse_text('a' * 204801) }
- .to raise_error(Gitlab::Conflict::Parser::UnmergeableFile)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnmergeableFile)
end
# All text from Rugged has an encoding of ASCII_8BIT, so force that in
@@ -214,7 +216,7 @@ CONFLICT
context 'when the file contains non-UTF-8 characters' do
it 'raises UnsupportedEncoding' do
expect { parse_text("a\xC4\xFC".force_encoding(Encoding::ASCII_8BIT)) }
- .to raise_error(Gitlab::Conflict::Parser::UnsupportedEncoding)
+ .to raise_error(Gitlab::Git::Conflict::Parser::UnsupportedEncoding)
end
end
end
diff --git a/spec/lib/gitlab/git/env_spec.rb b/spec/lib/gitlab/git/env_spec.rb
index d9df99bfe05..03836d49518 100644
--- a/spec/lib/gitlab/git/env_spec.rb
+++ b/spec/lib/gitlab/git/env_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
describe Gitlab::Git::Env do
- describe "#set" do
+ describe ".set" do
context 'with RequestStore.store disabled' do
before do
allow(RequestStore).to receive(:active?).and_return(false)
@@ -34,25 +34,57 @@ describe Gitlab::Git::Env do
end
end
- describe "#all" do
+ describe ".all" do
context 'with RequestStore.store enabled' do
before do
allow(RequestStore).to receive(:active?).and_return(true)
described_class.set(
GIT_OBJECT_DIRECTORY: 'foo',
- GIT_ALTERNATE_OBJECT_DIRECTORIES: 'bar')
+ GIT_ALTERNATE_OBJECT_DIRECTORIES: ['bar'])
end
it 'returns an env hash' do
expect(described_class.all).to eq({
'GIT_OBJECT_DIRECTORY' => 'foo',
- 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => 'bar'
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES' => ['bar']
})
end
end
end
- describe "#[]" do
+ describe ".to_env_hash" do
+ context 'with RequestStore.store enabled' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:key) { 'GIT_OBJECT_DIRECTORY' }
+ subject { described_class.to_env_hash }
+
+ where(:input, :output) do
+ nil | nil
+ 'foo' | 'foo'
+ [] | ''
+ ['foo'] | 'foo'
+ %w[foo bar] | 'foo:bar'
+ end
+
+ with_them do
+ before do
+ allow(RequestStore).to receive(:active?).and_return(true)
+ described_class.set(key.to_sym => input)
+ end
+
+ it 'puts the right value in the hash' do
+ if output
+ expect(subject.fetch(key)).to eq(output)
+ else
+ expect(subject.has_key?(key)).to eq(false)
+ end
+ end
+ end
+ end
+ end
+
+ describe ".[]" do
context 'with RequestStore.store enabled' do
before do
allow(RequestStore).to receive(:active?).and_return(true)
diff --git a/spec/lib/gitlab/git/popen_spec.rb b/spec/lib/gitlab/git/popen_spec.rb
new file mode 100644
index 00000000000..2b65bc1cf15
--- /dev/null
+++ b/spec/lib/gitlab/git/popen_spec.rb
@@ -0,0 +1,132 @@
+require 'spec_helper'
+
+describe 'Gitlab::Git::Popen' do
+ let(:path) { Rails.root.join('tmp').to_s }
+
+ let(:klass) do
+ Class.new(Object) do
+ include Gitlab::Git::Popen
+ end
+ end
+
+ context 'popen' do
+ context 'zero status' do
+ let(:result) { klass.new.popen(%w(ls), path) }
+ let(:output) { result.first }
+ let(:status) { result.last }
+
+ it { expect(status).to be_zero }
+ it { expect(output).to include('tests') }
+ end
+
+ context 'non-zero status' do
+ let(:result) { klass.new.popen(%w(cat NOTHING), path) }
+ let(:output) { result.first }
+ let(:status) { result.last }
+
+ it { expect(status).to eq(1) }
+ it { expect(output).to include('No such file or directory') }
+ end
+
+ context 'unsafe string command' do
+ it 'raises an error when it gets called with a string argument' do
+ expect { klass.new.popen('ls', path) }.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'with custom options' do
+ let(:vars) { { 'foobar' => 123, 'PWD' => path } }
+ let(:options) { { chdir: path } }
+
+ it 'calls popen3 with the provided environment variables' do
+ expect(Open3).to receive(:popen3).with(vars, 'ls', options)
+
+ klass.new.popen(%w(ls), path, { 'foobar' => 123 })
+ end
+ end
+
+ context 'use stdin' do
+ let(:result) { klass.new.popen(%w[cat], path) { |stdin| stdin.write 'hello' } }
+ let(:output) { result.first }
+ let(:status) { result.last }
+
+ it { expect(status).to be_zero }
+ it { expect(output).to eq('hello') }
+ end
+ end
+
+ context 'popen_with_timeout' do
+ let(:timeout) { 1.second }
+
+ context 'no timeout' do
+ context 'zero status' do
+ let(:result) { klass.new.popen_with_timeout(%w(ls), timeout, path) }
+ let(:output) { result.first }
+ let(:status) { result.last }
+
+ it { expect(status).to be_zero }
+ it { expect(output).to include('tests') }
+ end
+
+ context 'non-zero status' do
+ let(:result) { klass.new.popen_with_timeout(%w(cat NOTHING), timeout, path) }
+ let(:output) { result.first }
+ let(:status) { result.last }
+
+ it { expect(status).to eq(1) }
+ it { expect(output).to include('No such file or directory') }
+ end
+
+ context 'unsafe string command' do
+ it 'raises an error when it gets called with a string argument' do
+ expect { klass.new.popen_with_timeout('ls', timeout, path) }.to raise_error(RuntimeError)
+ end
+ end
+ end
+
+ context 'timeout' do
+ context 'timeout' do
+ it "raises a Timeout::Error" do
+ expect { klass.new.popen_with_timeout(%w(sleep 1000), timeout, path) }.to raise_error(Timeout::Error)
+ end
+
+ it "handles processes that do not shut down correctly" do
+ expect { klass.new.popen_with_timeout(['bash', '-c', "trap -- '' SIGTERM; sleep 1000"], timeout, path) }.to raise_error(Timeout::Error)
+ end
+ end
+
+ context 'timeout period' do
+ let(:time_taken) do
+ begin
+ start = Time.now
+ klass.new.popen_with_timeout(%w(sleep 1000), timeout, path)
+ rescue
+ Time.now - start
+ end
+ end
+
+ it { expect(time_taken).to be >= timeout }
+ end
+
+ context 'clean up' do
+ let(:instance) { klass.new }
+
+ it 'kills the child process' do
+ expect(instance).to receive(:kill_process_group_for_pid).and_wrap_original do |m, *args|
+ # args.first is the PID, and it should not be running at this point
+ m.call(*args)
+
+ pid = args.first
+ begin
+ Process.getpgid(pid)
+ raise "The child process should have been killed"
+ rescue Errno::ESRCH
+ end
+ end
+
+ expect { instance.popen_with_timeout(['bash', '-c', "trap -- '' SIGTERM; sleep 1000"], timeout, path) }.to raise_error(Timeout::Error)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index b11fa38856b..b2d2f770e3d 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -1525,6 +1525,45 @@ describe Gitlab::Git::Repository, seed_helper: true do
end
end
+ describe '#merge' do
+ let(:repository) do
+ Gitlab::Git::Repository.new('default', TEST_MUTABLE_REPO_PATH, '')
+ end
+ let(:source_sha) { '913c66a37b4a45b9769037c55c2d238bd0942d2e' }
+ let(:user) { build(:user) }
+ let(:target_branch) { 'test-merge-target-branch' }
+
+ before do
+ repository.create_branch(target_branch, '6d394385cf567f80a8fd85055db1ab4c5295806f')
+ end
+
+ after do
+ FileUtils.rm_rf(TEST_MUTABLE_REPO_PATH)
+ ensure_seeds
+ end
+
+ shared_examples '#merge' do
+ it 'can perform a merge' do
+ merge_commit_id = nil
+ result = repository.merge(user, source_sha, target_branch, 'Test merge') do |commit_id|
+ merge_commit_id = commit_id
+ end
+
+ expect(result.newrev).to eq(merge_commit_id)
+ expect(result.repo_created).to eq(false)
+ expect(result.branch_created).to eq(false)
+ end
+ end
+
+ context 'with gitaly' do
+ it_behaves_like '#merge'
+ end
+
+ context 'without gitaly', :skip_gitaly_mock do
+ it_behaves_like '#merge'
+ end
+ end
+
def create_remote_branch(repository, remote_name, branch_name, source_branch_name)
source_branch = repository.branches.find { |branch| branch.name == source_branch_name }
rugged = repository.rugged
diff --git a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
index 98cf7966dad..c8d532df059 100644
--- a/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
+++ b/spec/lib/gitlab/git/storage/circuit_breaker_spec.rb
@@ -10,18 +10,10 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
# Override test-settings for the circuitbreaker with something more realistic
# for these specs.
stub_storage_settings('default' => {
- 'path' => TestEnv.repos_path,
- 'failure_count_threshold' => 10,
- 'failure_wait_time' => 30,
- 'failure_reset_time' => 1800,
- 'storage_timeout' => 5
+ 'path' => TestEnv.repos_path
},
'broken' => {
- 'path' => 'tmp/tests/non-existent-repositories',
- 'failure_count_threshold' => 10,
- 'failure_wait_time' => 30,
- 'failure_reset_time' => 1800,
- 'storage_timeout' => 5
+ 'path' => 'tmp/tests/non-existent-repositories'
},
'nopath' => { 'path' => nil }
)
@@ -49,6 +41,10 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
expect(key_exists).to be_falsey
end
+
+ it 'does not break when there are no keys in redis' do
+ expect { described_class.reset_all! }.not_to raise_error
+ end
end
describe '.for_storage' do
@@ -75,10 +71,39 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
expect(circuit_breaker.hostname).to eq(hostname)
expect(circuit_breaker.storage).to eq('default')
expect(circuit_breaker.storage_path).to eq(TestEnv.repos_path)
- expect(circuit_breaker.failure_count_threshold).to eq(10)
- expect(circuit_breaker.failure_wait_time).to eq(30)
- expect(circuit_breaker.failure_reset_time).to eq(1800)
- expect(circuit_breaker.storage_timeout).to eq(5)
+ end
+ end
+
+ context 'circuitbreaker settings' do
+ before do
+ stub_application_setting(circuitbreaker_failure_count_threshold: 0,
+ circuitbreaker_failure_wait_time: 1,
+ circuitbreaker_failure_reset_time: 2,
+ circuitbreaker_storage_timeout: 3)
+ end
+
+ describe '#failure_count_threshold' do
+ it 'reads the value from settings' do
+ expect(circuit_breaker.failure_count_threshold).to eq(0)
+ end
+ end
+
+ describe '#failure_wait_time' do
+ it 'reads the value from settings' do
+ expect(circuit_breaker.failure_wait_time).to eq(1)
+ end
+ end
+
+ describe '#failure_reset_time' do
+ it 'reads the value from settings' do
+ expect(circuit_breaker.failure_reset_time).to eq(2)
+ end
+ end
+
+ describe '#storage_timeout' do
+ it 'reads the value from settings' do
+ expect(circuit_breaker.storage_timeout).to eq(3)
+ end
end
end
@@ -151,10 +176,7 @@ describe Gitlab::Git::Storage::CircuitBreaker, clean_gitlab_redis_shared_state:
context 'the `failure_wait_time` is set to 0' do
before do
- stub_storage_settings('default' => {
- 'failure_wait_time' => 0,
- 'path' => TestEnv.repos_path
- })
+ stub_application_setting(circuitbreaker_failure_wait_time: 0)
end
it 'is working even when there is a recent failure' do
diff --git a/spec/lib/gitlab/git/storage/health_spec.rb b/spec/lib/gitlab/git/storage/health_spec.rb
index 2d3af387971..4a14a5201d1 100644
--- a/spec/lib/gitlab/git/storage/health_spec.rb
+++ b/spec/lib/gitlab/git/storage/health_spec.rb
@@ -20,36 +20,6 @@ describe Gitlab::Git::Storage::Health, clean_gitlab_redis_shared_state: true, br
end
end
- describe '.load_for_keys' do
- let(:subject) do
- results = Gitlab::Git::Storage.redis.with do |redis|
- fake_future = double
- allow(fake_future).to receive(:value).and_return([host1_key])
- described_class.load_for_keys({ 'broken' => fake_future }, redis)
- end
-
- # Make sure the `Redis#future is loaded
- results.inject({}) do |result, (name, info)|
- info.each { |i| i[:failure_count] = i[:failure_count].value.to_i }
-
- result[name] = info
-
- result
- end
- end
-
- it 'loads when there is no info in redis' do
- expect(subject).to eq('broken' => [{ name: host1_key, failure_count: 0 }])
- end
-
- it 'reads the correct values for a storage from redis' do
- set_in_redis(host1_key, 5)
- set_in_redis(host2_key, 7)
-
- expect(subject).to eq('broken' => [{ name: host1_key, failure_count: 5 }])
- end
- end
-
describe '.for_all_storages' do
it 'loads health status for all configured storages' do
healths = described_class.for_all_storages
diff --git a/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb b/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
index 0e645008c88..7ee6d2f3709 100644
--- a/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
+++ b/spec/lib/gitlab/git/storage/null_circuit_breaker_spec.rb
@@ -54,6 +54,10 @@ describe Gitlab::Git::Storage::NullCircuitBreaker do
end
describe '#failure_count_threshold' do
+ before do
+ stub_application_setting(circuitbreaker_failure_count_threshold: 1)
+ end
+
it { expect(breaker.failure_count_threshold).to eq(1) }
end
diff --git a/spec/lib/gitlab/github_import/wiki_formatter_spec.rb b/spec/lib/gitlab/github_import/wiki_formatter_spec.rb
index fcd90fab547..2662cc20b32 100644
--- a/spec/lib/gitlab/github_import/wiki_formatter_spec.rb
+++ b/spec/lib/gitlab/github_import/wiki_formatter_spec.rb
@@ -11,7 +11,7 @@ describe Gitlab::GithubImport::WikiFormatter do
describe '#disk_path' do
it 'appends .wiki to project path' do
- expect(wiki.disk_path).to eq project.disk_path + '.wiki'
+ expect(wiki.disk_path).to eq project.wiki.disk_path
end
end
diff --git a/spec/lib/gitlab/group_hierarchy_spec.rb b/spec/lib/gitlab/group_hierarchy_spec.rb
index 8dc83a6db7f..30686634af4 100644
--- a/spec/lib/gitlab/group_hierarchy_spec.rb
+++ b/spec/lib/gitlab/group_hierarchy_spec.rb
@@ -18,6 +18,12 @@ describe Gitlab::GroupHierarchy, :postgresql do
expect(relation).to include(parent, child1)
end
+ it 'can find ancestors up to a certain level' do
+ relation = described_class.new(Group.where(id: child2)).base_and_ancestors(upto: child1)
+
+ expect(relation).to contain_exactly(child2)
+ end
+
it 'uses ancestors_base #initialize argument' do
relation = described_class.new(Group.where(id: child2.id), Group.none).base_and_ancestors
@@ -55,6 +61,28 @@ describe Gitlab::GroupHierarchy, :postgresql do
end
end
+ describe '#descendants' do
+ it 'includes only the descendants' do
+ relation = described_class.new(Group.where(id: parent)).descendants
+
+ expect(relation).to contain_exactly(child1, child2)
+ end
+ end
+
+ describe '#ancestors' do
+ it 'includes only the ancestors' do
+ relation = described_class.new(Group.where(id: child2)).ancestors
+
+ expect(relation).to contain_exactly(child1, parent)
+ end
+
+ it 'can find ancestors up to a certain level' do
+ relation = described_class.new(Group.where(id: child2)).ancestors(upto: child1)
+
+ expect(relation).to be_empty
+ end
+ end
+
describe '#all_groups' do
let(:relation) do
described_class.new(Group.where(id: child1.id)).all_groups
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 80d92b2e6a3..121c0ed04ed 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -496,6 +496,7 @@ Timelog:
- merge_request_id
- issue_id
- user_id
+- spent_at
- created_at
- updated_at
ProjectAutoDevops:
diff --git a/spec/lib/gitlab/multi_collection_paginator_spec.rb b/spec/lib/gitlab/multi_collection_paginator_spec.rb
new file mode 100644
index 00000000000..68bd4f93159
--- /dev/null
+++ b/spec/lib/gitlab/multi_collection_paginator_spec.rb
@@ -0,0 +1,46 @@
+require 'spec_helper'
+
+describe Gitlab::MultiCollectionPaginator do
+ subject(:paginator) { described_class.new(Project.all.order(:id), Group.all.order(:id), per_page: 3) }
+
+ it 'combines both collections' do
+ project = create(:project)
+ group = create(:group)
+
+ expect(paginator.paginate(1)).to eq([project, group])
+ end
+
+ it 'includes elements of the second collection if the first collection is empty' do
+ group = create(:group)
+
+ expect(paginator.paginate(1)).to eq([group])
+ end
+
+ context 'with a full first page' do
+ let!(:all_groups) { create_list(:group, 4) }
+ let!(:all_projects) { create_list(:project, 4) }
+
+ it 'knows the total count of the collection' do
+ expect(paginator.total_count).to eq(8)
+ end
+
+ it 'fills the first page with elements of the first collection' do
+ expect(paginator.paginate(1)).to eq(all_projects.take(3))
+ end
+
+ it 'fills the second page with a mixture of the first & second collection' do
+ first_collection_element = all_projects.last
+ second_collection_elements = all_groups.take(2)
+
+ expected_collection = [first_collection_element] + second_collection_elements
+
+ expect(paginator.paginate(2)).to eq(expected_collection)
+ end
+
+ it 'fills the last page with elements from the second collection' do
+ expected_collection = all_groups[-2..-1]
+
+ expect(paginator.paginate(3)).to eq(expected_collection)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb
index 1f1c48ee9b5..f1f188cbfb5 100644
--- a/spec/lib/gitlab/path_regex_spec.rb
+++ b/spec/lib/gitlab/path_regex_spec.rb
@@ -213,7 +213,7 @@ describe Gitlab::PathRegex do
it 'accepts group routes' do
expect(subject).to match('activity/')
expect(subject).to match('group_members/')
- expect(subject).to match('subgroups/')
+ expect(subject).to match('labels/')
end
it 'is not case sensitive' do
@@ -246,7 +246,7 @@ describe Gitlab::PathRegex do
it 'accepts group routes' do
expect(subject).to match('activity/')
expect(subject).to match('group_members/')
- expect(subject).to match('subgroups/')
+ expect(subject).to match('labels/')
end
end
@@ -268,7 +268,7 @@ describe Gitlab::PathRegex do
it 'accepts group routes' do
expect(subject).to match('activity/more/')
expect(subject).to match('group_members/more/')
- expect(subject).to match('subgroups/more/')
+ expect(subject).to match('labels/more/')
end
end
end
@@ -292,7 +292,7 @@ describe Gitlab::PathRegex do
it 'rejects group routes' do
expect(subject).not_to match('root/activity/')
expect(subject).not_to match('root/group_members/')
- expect(subject).not_to match('root/subgroups/')
+ expect(subject).not_to match('root/labels/')
end
end
@@ -314,7 +314,7 @@ describe Gitlab::PathRegex do
it 'rejects group routes' do
expect(subject).not_to match('root/activity/more/')
expect(subject).not_to match('root/group_members/more/')
- expect(subject).not_to match('root/subgroups/more/')
+ expect(subject).not_to match('root/labels/more/')
end
end
end
@@ -349,7 +349,7 @@ describe Gitlab::PathRegex do
it 'accepts group routes' do
expect(subject).to match('activity/')
expect(subject).to match('group_members/')
- expect(subject).to match('subgroups/')
+ expect(subject).to match('labels/')
end
it 'is not case sensitive' do
@@ -382,7 +382,7 @@ describe Gitlab::PathRegex do
it 'accepts group routes' do
expect(subject).to match('root/activity/')
expect(subject).to match('root/group_members/')
- expect(subject).to match('root/subgroups/')
+ expect(subject).to match('root/labels/')
end
it 'is not case sensitive' do
diff --git a/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb b/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
new file mode 100644
index 00000000000..8b58f0b3725
--- /dev/null
+++ b/spec/lib/gitlab/quick_actions/spend_time_and_date_separator_spec.rb
@@ -0,0 +1,81 @@
+require 'spec_helper'
+
+describe Gitlab::QuickActions::SpendTimeAndDateSeparator do
+ subject { described_class }
+
+ shared_examples 'arg line with invalid parameters' do
+ it 'returns nil' do
+ expect(subject.new(invalid_arg).execute).to eq(nil)
+ end
+ end
+
+ shared_examples 'arg line with valid parameters' do
+ it 'returns a time and date array' do
+ expect(subject.new(valid_arg).execute).to eq(expected_response)
+ end
+ end
+
+ describe '#execute' do
+ context 'invalid parameter in arg line' do
+ context 'empty arg line' do
+ it_behaves_like 'arg line with invalid parameters' do
+ let(:invalid_arg) { '' }
+ end
+ end
+
+ context 'future date in arg line' do
+ it_behaves_like 'arg line with invalid parameters' do
+ let(:invalid_arg) { '10m 6023-02-02' }
+ end
+ end
+
+ context 'unparseable date (invalid mix of delimiters)' do
+ it_behaves_like 'arg line with invalid parameters' do
+ let(:invalid_arg) { '10m 2017.02-02' }
+ end
+ end
+
+ context 'trash in arg line' do
+ let(:invalid_arg) { 'dfjkghdskjfghdjskfgdfg' }
+
+ it 'returns nil as the time value' do
+ time_date_response = subject.new(invalid_arg).execute
+
+ expect(time_date_response).to be_an_instance_of(Array)
+ expect(time_date_response.first).to eq(nil)
+ end
+ end
+ end
+
+ context 'only time present in arg line' do
+ it_behaves_like 'arg line with valid parameters' do
+ let(:valid_arg) { '2m 3m 5m 1h' }
+ let(:time) { Gitlab::TimeTrackingFormatter.parse(valid_arg) }
+ let(:date) { DateTime.now.to_date }
+ let(:expected_response) { [time, date] }
+ end
+ end
+
+ context 'simple time with date in arg line' do
+ it_behaves_like 'arg line with valid parameters' do
+ let(:raw_time) { '10m' }
+ let(:raw_date) { '2016-02-02' }
+ let(:valid_arg) { "#{raw_time} #{raw_date}" }
+ let(:date) { Date.parse(raw_date) }
+ let(:time) { Gitlab::TimeTrackingFormatter.parse(raw_time) }
+ let(:expected_response) { [time, date] }
+ end
+ end
+
+ context 'composite time with date in arg line' do
+ it_behaves_like 'arg line with valid parameters' do
+ let(:raw_time) { '2m 10m 1h 3d' }
+ let(:raw_date) { '2016/02/02' }
+ let(:valid_arg) { "#{raw_time} #{raw_date}" }
+ let(:date) { Date.parse(raw_date) }
+ let(:time) { Gitlab::TimeTrackingFormatter.parse(raw_time) }
+ let(:expected_response) { [time, date] }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/saml/auth_hash_spec.rb b/spec/lib/gitlab/saml/auth_hash_spec.rb
new file mode 100644
index 00000000000..a555935aea3
--- /dev/null
+++ b/spec/lib/gitlab/saml/auth_hash_spec.rb
@@ -0,0 +1,40 @@
+require 'spec_helper'
+
+describe Gitlab::Saml::AuthHash do
+ include LoginHelpers
+
+ let(:raw_info_attr) { { 'groups' => %w(Developers Freelancers) } }
+ subject(:saml_auth_hash) { described_class.new(omniauth_auth_hash) }
+
+ let(:info_hash) do
+ {
+ name: 'John',
+ email: 'john@mail.com'
+ }
+ end
+
+ let(:omniauth_auth_hash) do
+ OmniAuth::AuthHash.new(uid: 'my-uid',
+ provider: 'saml',
+ info: info_hash,
+ extra: { raw_info: OneLogin::RubySaml::Attributes.new(raw_info_attr) } )
+ end
+
+ before do
+ stub_saml_group_config(%w(Developers Freelancers Designers))
+ end
+
+ describe '#groups' do
+ it 'returns an array of groups' do
+ expect(saml_auth_hash.groups).to eq(%w(Developers Freelancers))
+ end
+
+ context 'raw info hash attributes empty' do
+ let(:raw_info_attr) { {} }
+
+ it 'returns an empty array' do
+ expect(saml_auth_hash.groups).to eq([])
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/saml/user_spec.rb b/spec/lib/gitlab/saml/user_spec.rb
index 59923bfb14d..1c23fb5f285 100644
--- a/spec/lib/gitlab/saml/user_spec.rb
+++ b/spec/lib/gitlab/saml/user_spec.rb
@@ -2,13 +2,15 @@ require 'spec_helper'
describe Gitlab::Saml::User do
include LdapHelpers
+ include LoginHelpers
let(:saml_user) { described_class.new(auth_hash) }
let(:gl_user) { saml_user.gl_user }
let(:uid) { 'my-uid' }
let(:dn) { 'uid=user1,ou=People,dc=example' }
let(:provider) { 'saml' }
- let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash, extra: { raw_info: OneLogin::RubySaml::Attributes.new({ 'groups' => %w(Developers Freelancers Designers) }) }) }
+ let(:raw_info_attr) { { 'groups' => %w(Developers Freelancers Designers) } }
+ let(:auth_hash) { OmniAuth::AuthHash.new(uid: uid, provider: provider, info: info_hash, extra: { raw_info: OneLogin::RubySaml::Attributes.new(raw_info_attr) }) }
let(:info_hash) do
{
name: 'John',
@@ -18,22 +20,6 @@ describe Gitlab::Saml::User do
let(:ldap_user) { Gitlab::LDAP::Person.new(Net::LDAP::Entry.new, 'ldapmain') }
describe '#save' do
- def stub_omniauth_config(messages)
- allow(Gitlab.config.omniauth).to receive_messages(messages)
- end
-
- def stub_ldap_config(messages)
- allow(Gitlab::LDAP::Config).to receive_messages(messages)
- end
-
- def stub_basic_saml_config
- allow(Gitlab::Saml::Config).to receive_messages({ options: { name: 'saml', args: {} } })
- end
-
- def stub_saml_group_config(groups)
- allow(Gitlab::Saml::Config).to receive_messages({ options: { name: 'saml', groups_attribute: 'groups', external_groups: groups, args: {} } })
- end
-
before do
stub_basic_saml_config
end
@@ -402,4 +388,16 @@ describe Gitlab::Saml::User do
end
end
end
+
+ describe '#find_user' do
+ context 'raw info hash attributes empty' do
+ let(:raw_info_attr) { {} }
+
+ it 'does not mark user as external' do
+ stub_saml_group_config(%w(Freelancers))
+
+ expect(saml_user.find_user.external).to be_falsy
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/sidekiq_status_spec.rb b/spec/lib/gitlab/sidekiq_status_spec.rb
index c2e77ef6b6c..884f27b212c 100644
--- a/spec/lib/gitlab/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/sidekiq_status_spec.rb
@@ -39,6 +39,18 @@ describe Gitlab::SidekiqStatus do
end
end
+ describe '.running?', :clean_gitlab_redis_shared_state do
+ it 'returns true if job is running' do
+ described_class.set('123')
+
+ expect(described_class.running?('123')).to be(true)
+ end
+
+ it 'returns false if job is not found' do
+ expect(described_class.running?('123')).to be(false)
+ end
+ end
+
describe '.num_running', :clean_gitlab_redis_shared_state do
it 'returns 0 if all jobs have been completed' do
expect(described_class.num_running(%w(123))).to eq(0)
diff --git a/spec/lib/gitlab/sql/union_spec.rb b/spec/lib/gitlab/sql/union_spec.rb
index 8026fba9f0a..fe6422c32b6 100644
--- a/spec/lib/gitlab/sql/union_spec.rb
+++ b/spec/lib/gitlab/sql/union_spec.rb
@@ -29,5 +29,12 @@ describe Gitlab::SQL::Union do
expect(union.to_sql).to include('UNION ALL')
end
+
+ it 'returns `NULL` if all relations are empty' do
+ empty_relation = User.none
+ union = described_class.new([empty_relation, empty_relation])
+
+ expect(union.to_sql).to eq('NULL')
+ end
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 777e9c8e21d..a7b65e94706 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -61,6 +61,8 @@ describe Gitlab::UsageData do
deployments
environments
gcp_clusters
+ gcp_clusters_enabled
+ gcp_clusters_disabled
in_review_folder
groups
issues
diff --git a/spec/lib/gitlab/utils/merge_hash_spec.rb b/spec/lib/gitlab/utils/merge_hash_spec.rb
new file mode 100644
index 00000000000..4fa7bb31301
--- /dev/null
+++ b/spec/lib/gitlab/utils/merge_hash_spec.rb
@@ -0,0 +1,33 @@
+require 'spec_helper'
+describe Gitlab::Utils::MergeHash do
+ describe '.crush' do
+ it 'flattens a hash into an array of its nested keys and values' do
+ input = { hello: "world", this: { crushes: ["an entire", "hash"] } }
+ expected_result = [:hello, "world", :this, :crushes, "an entire", "hash"]
+
+ expect(described_class.crush(input)).to eq(expected_result)
+ end
+ end
+
+ describe '.elements' do
+ it 'deep merges an array of elements' do
+ input = [{ hello: ["world"] },
+ { hello: "Everyone" },
+ { hello: { greetings: ['Bonjour', 'Hello', 'Hallo', 'Dzień dobry'] } },
+ "Goodbye", "Hallo"]
+ expected_output = [
+ {
+ hello:
+ [
+ "world",
+ "Everyone",
+ { greetings: ['Bonjour', 'Hello', 'Hallo', 'Dzień dobry'] }
+ ]
+ },
+ "Goodbye"
+ ]
+
+ expect(described_class.merge(input)).to eq(expected_output)
+ end
+ end
+end
diff --git a/spec/migrations/migrate_user_project_view_spec.rb b/spec/migrations/migrate_user_project_view_spec.rb
index afaa5d836a7..5e16769d63a 100644
--- a/spec/migrations/migrate_user_project_view_spec.rb
+++ b/spec/migrations/migrate_user_project_view_spec.rb
@@ -5,12 +5,7 @@ require Rails.root.join('db', 'post_migrate', '20170406142253_migrate_user_proje
describe MigrateUserProjectView, :truncate do
let(:migration) { described_class.new }
- let!(:user) { create(:user) }
-
- before do
- # 0 is the numeric value for the old 'readme' option
- user.update_column(:project_view, 0)
- end
+ let!(:user) { create(:user, project_view: 'readme') }
describe '#up' do
it 'updates project view setting with new value' do
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 78cacf9ff5d..30495fd4f5e 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -114,6 +114,19 @@ describe ApplicationSetting do
it { expect(setting.repository_storages).to eq(['default']) }
end
+ context 'circuitbreaker settings' do
+ [:circuitbreaker_failure_count_threshold,
+ :circuitbreaker_failure_wait_time,
+ :circuitbreaker_failure_reset_time,
+ :circuitbreaker_storage_timeout].each do |field|
+ it "Validates #{field} as number" do
+ is_expected.to validate_numericality_of(field)
+ .only_integer
+ .is_greater_than_or_equal_to(0)
+ end
+ end
+ end
+
context 'repository storages' do
before do
storages = {
@@ -207,6 +220,31 @@ describe ApplicationSetting do
expect(described_class.current).to eq(:last)
end
end
+
+ context 'when an ApplicationSetting is not yet present' do
+ it 'does not cache nil object' do
+ # when settings are missing, a nil object is returned but not cached
+ allow(described_class).to receive(:last).and_return(nil).twice
+ expect(described_class.current).to be_nil
+
+ # when the settings are set the method returns a valid object
+ allow(described_class).to receive(:last).and_return(:last)
+ expect(described_class.current).to eq(:last)
+
+ # subsequent calls get everything from cache
+ expect(described_class.current).to eq(:last)
+ end
+ end
+ end
+
+ context 'restrict creating duplicates' do
+ before do
+ described_class.create_from_defaults
+ end
+
+ it 'raises a record creation violation if already created' do
+ expect { described_class.create_from_defaults }.to raise_error(ActiveRecord::RecordNotUnique)
+ end
end
context 'restricted signup domains' do
diff --git a/spec/models/blob_viewer/readme_spec.rb b/spec/models/blob_viewer/readme_spec.rb
index 926df21ffda..b9946c0315a 100644
--- a/spec/models/blob_viewer/readme_spec.rb
+++ b/spec/models/blob_viewer/readme_spec.rb
@@ -37,7 +37,7 @@ describe BlobViewer::Readme do
context 'when the wiki is not empty' do
before do
- WikiPages::CreateService.new(project, project.owner, title: 'home', content: 'Home page').execute
+ create(:wiki_page, wiki: project.wiki, attrs: { title: 'home', content: 'Home page' })
end
it 'returns nil' do
diff --git a/spec/models/ci/artifact_blob_spec.rb b/spec/models/ci/artifact_blob_spec.rb
index d5ba088af53..4e72d9d748e 100644
--- a/spec/models/ci/artifact_blob_spec.rb
+++ b/spec/models/ci/artifact_blob_spec.rb
@@ -56,15 +56,14 @@ describe Ci::ArtifactBlob do
end
context 'txt extensions' do
- let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/doc_sample.txt') }
+ let(:path) { 'other_artifacts_0.1.2/doc_sample.txt' }
+ let(:entry) { build.artifacts_metadata_entry(path) }
it 'returns a URL' do
url = subject.external_url(build.project, build)
expect(url).not_to be_nil
- expect(url).to start_with("http")
- expect(url).to match Gitlab.config.pages.host
- expect(url).to end_with(entry.path)
+ expect(url).to eq("http://#{project.namespace.path}.#{Gitlab.config.pages.host}/-/#{project.path}/-/jobs/#{build.id}/artifacts/#{path}")
end
end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 06f76b5501e..41ecdb604f1 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1743,19 +1743,34 @@ describe Ci::Build do
end
describe 'state transition when build fails' do
+ let(:service) { MergeRequests::AddTodoWhenBuildFailsService.new(project, user) }
+
+ before do
+ allow(MergeRequests::AddTodoWhenBuildFailsService).to receive(:new).and_return(service)
+ allow(service).to receive(:close)
+ end
+
context 'when build is configured to be retried' do
- subject { create(:ci_build, :running, options: { retry: 3 }) }
+ subject { create(:ci_build, :running, options: { retry: 3 }, project: project, user: user) }
- it 'retries builds and assigns a same user to it' do
+ it 'retries the build and assigns the same user to it' do
expect(described_class).to receive(:retry)
- .with(subject, subject.user)
+ .with(subject, user)
+
+ subject.drop!
+ end
+
+ it 'does not try to create a todo' do
+ project.add_developer(user)
+
+ expect(service).not_to receive(:commit_status_merge_requests)
subject.drop!
end
end
context 'when build is not configured to be retried' do
- subject { create(:ci_build, :running) }
+ subject { create(:ci_build, :running, project: project, user: user) }
it 'does not retry build' do
expect(described_class).not_to receive(:retry)
@@ -1770,6 +1785,14 @@ describe Ci::Build do
subject.drop!
end
+
+ it 'creates a todo' do
+ project.add_developer(user)
+
+ expect(service).to receive(:commit_status_merge_requests)
+
+ subject.drop!
+ end
end
end
end
diff --git a/spec/models/concerns/group_descendant_spec.rb b/spec/models/concerns/group_descendant_spec.rb
new file mode 100644
index 00000000000..c163fb01a81
--- /dev/null
+++ b/spec/models/concerns/group_descendant_spec.rb
@@ -0,0 +1,166 @@
+require 'spec_helper'
+
+describe GroupDescendant, :nested_groups do
+ let(:parent) { create(:group) }
+ let(:subgroup) { create(:group, parent: parent) }
+ let(:subsub_group) { create(:group, parent: subgroup) }
+
+ def all_preloaded_groups(*groups)
+ groups + [parent, subgroup, subsub_group]
+ end
+
+ context 'for a group' do
+ describe '#hierarchy' do
+ it 'only queries once for the ancestors' do
+ # make sure the subsub_group does not have anything cached
+ test_group = create(:group, parent: subsub_group).reload
+
+ query_count = ActiveRecord::QueryRecorder.new { test_group.hierarchy }.count
+
+ expect(query_count).to eq(1)
+ end
+
+ it 'only queries once for the ancestors when a top is given' do
+ test_group = create(:group, parent: subsub_group).reload
+
+ recorder = ActiveRecord::QueryRecorder.new { test_group.hierarchy(subgroup) }
+ expect(recorder.count).to eq(1)
+ end
+
+ it 'builds a hierarchy for a group' do
+ expected_hierarchy = { parent => { subgroup => subsub_group } }
+
+ expect(subsub_group.hierarchy).to eq(expected_hierarchy)
+ end
+
+ it 'builds a hierarchy up to a specified parent' do
+ expected_hierarchy = { subgroup => subsub_group }
+
+ expect(subsub_group.hierarchy(parent)).to eq(expected_hierarchy)
+ end
+
+ it 'raises an error if specifying a base that is not part of the tree' do
+ expect { subsub_group.hierarchy(build_stubbed(:group)) }
+ .to raise_error('specified top is not part of the tree')
+ end
+ end
+
+ describe '.build_hierarchy' do
+ it 'combines hierarchies until the top' do
+ other_subgroup = create(:group, parent: parent)
+ other_subsub_group = create(:group, parent: subgroup)
+
+ groups = all_preloaded_groups(other_subgroup, subsub_group, other_subsub_group)
+
+ expected_hierarchy = { parent => [other_subgroup, { subgroup => [subsub_group, other_subsub_group] }] }
+
+ expect(described_class.build_hierarchy(groups)).to eq(expected_hierarchy)
+ end
+
+ it 'combines hierarchies up to a given parent' do
+ other_subgroup = create(:group, parent: parent)
+ other_subsub_group = create(:group, parent: subgroup)
+
+ groups = [other_subgroup, subsub_group, other_subsub_group]
+ groups << subgroup # Add the parent as if it was preloaded
+
+ expected_hierarchy = [other_subgroup, { subgroup => [subsub_group, other_subsub_group] }]
+ expect(described_class.build_hierarchy(groups, parent)).to eq(expected_hierarchy)
+ end
+
+ it 'handles building a tree out of order' do
+ other_subgroup = create(:group, parent: parent)
+ other_subgroup2 = create(:group, parent: parent)
+ other_subsub_group = create(:group, parent: other_subgroup)
+
+ groups = all_preloaded_groups(subsub_group, other_subgroup2, other_subsub_group, other_subgroup)
+ expected_hierarchy = { parent => [{ subgroup => subsub_group }, other_subgroup2, { other_subgroup => other_subsub_group }] }
+
+ expect(described_class.build_hierarchy(groups)).to eq(expected_hierarchy)
+ end
+
+ it 'raises an error if not all elements were preloaded' do
+ expect { described_class.build_hierarchy([subsub_group]) }
+ .to raise_error('parent was not preloaded')
+ end
+ end
+ end
+
+ context 'for a project' do
+ let(:project) { create(:project, namespace: subsub_group) }
+
+ describe '#hierarchy' do
+ it 'builds a hierarchy for a project' do
+ expected_hierarchy = { parent => { subgroup => { subsub_group => project } } }
+
+ expect(project.hierarchy).to eq(expected_hierarchy)
+ end
+
+ it 'builds a hierarchy up to a specified parent' do
+ expected_hierarchy = { subsub_group => project }
+
+ expect(project.hierarchy(subgroup)).to eq(expected_hierarchy)
+ end
+ end
+
+ describe '.build_hierarchy' do
+ it 'combines hierarchies until the top' do
+ other_project = create(:project, namespace: parent)
+ other_subgroup_project = create(:project, namespace: subgroup)
+
+ elements = all_preloaded_groups(other_project, subsub_group, other_subgroup_project)
+
+ expected_hierarchy = { parent => [other_project, { subgroup => [subsub_group, other_subgroup_project] }] }
+
+ expect(described_class.build_hierarchy(elements)).to eq(expected_hierarchy)
+ end
+
+ it 'combines hierarchies up to a given parent' do
+ other_project = create(:project, namespace: parent)
+ other_subgroup_project = create(:project, namespace: subgroup)
+
+ elements = [other_project, subsub_group, other_subgroup_project]
+ elements << subgroup # Added as if it was preloaded
+
+ expected_hierarchy = [other_project, { subgroup => [subsub_group, other_subgroup_project] }]
+
+ expect(described_class.build_hierarchy(elements, parent)).to eq(expected_hierarchy)
+ end
+
+ it 'merges two elements in the same hierarchy' do
+ expected_hierarchy = { parent => subgroup }
+
+ expect(described_class.build_hierarchy([parent, subgroup])).to eq(expected_hierarchy)
+ end
+
+ it 'merges complex hierarchies' do
+ project = create(:project, namespace: parent)
+ sub_project = create(:project, namespace: subgroup)
+ subsubsub_group = create(:group, parent: subsub_group)
+ subsub_project = create(:project, namespace: subsub_group)
+ subsubsub_project = create(:project, namespace: subsubsub_group)
+ other_subgroup = create(:group, parent: parent)
+ other_subproject = create(:project, namespace: other_subgroup)
+
+ elements = [project, subsubsub_project, sub_project, other_subproject, subsub_project]
+ # Add parent groups as if they were preloaded
+ elements += [other_subgroup, subsubsub_group, subsub_group, subgroup]
+
+ expected_hierarchy = [
+ project,
+ {
+ subgroup => [
+ { subsub_group => [{ subsubsub_group => subsubsub_project }, subsub_project] },
+ sub_project
+ ]
+ },
+ { other_subgroup => other_subproject }
+ ]
+
+ actual_hierarchy = described_class.build_hierarchy(elements, parent)
+
+ expect(actual_hierarchy).to eq(expected_hierarchy)
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/loaded_in_group_list_spec.rb b/spec/models/concerns/loaded_in_group_list_spec.rb
new file mode 100644
index 00000000000..7a279547a3a
--- /dev/null
+++ b/spec/models/concerns/loaded_in_group_list_spec.rb
@@ -0,0 +1,49 @@
+require 'spec_helper'
+
+describe LoadedInGroupList do
+ let(:parent) { create(:group) }
+ subject(:found_group) { Group.with_selects_for_list.find_by(id: parent.id) }
+
+ describe '.with_selects_for_list' do
+ it 'includes the preloaded counts for groups' do
+ create(:group, parent: parent)
+ create(:project, namespace: parent)
+ parent.add_developer(create(:user))
+
+ found_group = Group.with_selects_for_list.find_by(id: parent.id)
+
+ expect(found_group.preloaded_project_count).to eq(1)
+ expect(found_group.preloaded_subgroup_count).to eq(1)
+ expect(found_group.preloaded_member_count).to eq(1)
+ end
+
+ context 'with archived projects' do
+ it 'counts including archived projects when `true` is passed' do
+ create(:project, namespace: parent, archived: true)
+ create(:project, namespace: parent)
+
+ found_group = Group.with_selects_for_list(archived: 'true').find_by(id: parent.id)
+
+ expect(found_group.preloaded_project_count).to eq(2)
+ end
+
+ it 'counts only archived projects when `only` is passed' do
+ create_list(:project, 2, namespace: parent, archived: true)
+ create(:project, namespace: parent)
+
+ found_group = Group.with_selects_for_list(archived: 'only').find_by(id: parent.id)
+
+ expect(found_group.preloaded_project_count).to eq(2)
+ end
+ end
+ end
+
+ describe '#children_count' do
+ it 'counts groups and projects' do
+ create(:group, parent: parent)
+ create(:project, namespace: parent)
+
+ expect(found_group.children_count).to eq(2)
+ end
+ end
+end
diff --git a/spec/models/diff_note_spec.rb b/spec/models/diff_note_spec.rb
index eb0a3e9e0d3..da972d2d86a 100644
--- a/spec/models/diff_note_spec.rb
+++ b/spec/models/diff_note_spec.rb
@@ -105,7 +105,7 @@ describe DiffNote do
describe "#line_code" do
it "returns the correct line code" do
- line_code = Gitlab::Diff::LineCode.generate(position.file_path, position.formatter.new_line, 15)
+ line_code = Gitlab::Git.diff_line_code(position.file_path, position.formatter.new_line, 15)
expect(subject.line_code).to eq(line_code)
end
diff --git a/spec/models/gcp/cluster_spec.rb b/spec/models/gcp/cluster_spec.rb
index 350fbc257d9..8f39fff6394 100644
--- a/spec/models/gcp/cluster_spec.rb
+++ b/spec/models/gcp/cluster_spec.rb
@@ -7,6 +7,30 @@ describe Gcp::Cluster do
it { is_expected.to validate_presence_of(:gcp_cluster_zone) }
+ describe '.enabled' do
+ subject { described_class.enabled }
+
+ let!(:cluster) { create(:gcp_cluster, enabled: true) }
+
+ before do
+ create(:gcp_cluster, enabled: false)
+ end
+
+ it { is_expected.to contain_exactly(cluster) }
+ end
+
+ describe '.disabled' do
+ subject { described_class.disabled }
+
+ let!(:cluster) { create(:gcp_cluster, enabled: false) }
+
+ before do
+ create(:gcp_cluster, enabled: true)
+ end
+
+ it { is_expected.to contain_exactly(cluster) }
+ end
+
describe '#default_value_for' do
let(:cluster) { described_class.new }
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 17c9f15b021..73e038b61ed 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -1460,11 +1460,31 @@ describe MergeRequest do
end
describe '#merge_ongoing?' do
- it 'returns true when merge_id is present and MR is not merged' do
+ it 'returns true when merge_jid is present, the MR is not merged and the merge job is running' do
merge_request = build_stubbed(:merge_request, state: :open, merge_jid: 'foo')
+ allow(Gitlab::SidekiqStatus).to receive(:running?).with('foo') { true }
expect(merge_request.merge_ongoing?).to be(true)
end
+
+ it 'returns false when merge_jid is nil' do
+ merge_request = build_stubbed(:merge_request, state: :open, merge_jid: nil)
+
+ expect(merge_request.merge_ongoing?).to be(false)
+ end
+
+ it 'returns false if MR is merged' do
+ merge_request = build_stubbed(:merge_request, state: :merged, merge_jid: 'foo')
+
+ expect(merge_request.merge_ongoing?).to be(false)
+ end
+
+ it 'returns false if there is no merge job running' do
+ merge_request = build_stubbed(:merge_request, state: :open, merge_jid: 'foo')
+ allow(Gitlab::SidekiqStatus).to receive(:running?).with('foo') { false }
+
+ expect(merge_request.merge_ongoing?).to be(false)
+ end
end
describe "#closed_without_fork?" do
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 2ebf6acd42a..90b768f595e 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -4,6 +4,7 @@ describe Namespace do
include ProjectForksHelper
let!(:namespace) { create(:namespace) }
+ let(:gitlab_shell) { Gitlab::Shell.new }
describe 'associations' do
it { is_expected.to have_many :projects }
@@ -153,25 +154,32 @@ describe Namespace do
end
end
- describe '#move_dir' do
- let(:namespace) { create(:namespace) }
- let!(:project) { create(:project_empty_repo, namespace: namespace) }
+ describe '#ancestors_upto', :nested_groups do
+ let(:parent) { create(:group) }
+ let(:child) { create(:group, parent: parent) }
+ let(:child2) { create(:group, parent: child) }
- before do
- allow(namespace).to receive(:path_changed?).and_return(true)
+ it 'returns all ancestors when no namespace is given' do
+ expect(child2.ancestors_upto).to contain_exactly(child, parent)
+ end
+
+ it 'includes ancestors up to but excluding the given ancestor' do
+ expect(child2.ancestors_upto(parent)).to contain_exactly(child)
end
+ end
+
+ describe '#move_dir', :request_store do
+ let(:namespace) { create(:namespace) }
+ let!(:project) { create(:project_empty_repo, namespace: namespace) }
it "raises error when directory exists" do
expect { namespace.move_dir }.to raise_error("namespace directory cannot be moved")
end
it "moves dir if path changed" do
- new_path = namespace.full_path + "_new"
+ namespace.update_attributes(path: namespace.full_path + '_new')
- allow(namespace).to receive(:full_path_was).and_return(namespace.full_path)
- allow(namespace).to receive(:full_path).and_return(new_path)
- expect(namespace).to receive(:remove_exports!)
- expect(namespace.move_dir).to be_truthy
+ expect(gitlab_shell.exists?(project.repository_storage_path, "#{namespace.path}/#{project.path}.git")).to be_truthy
end
context "when any project has container images" do
diff --git a/spec/models/project_services/microsoft_teams_service_spec.rb b/spec/models/project_services/microsoft_teams_service_spec.rb
index f89be20ad78..6a5d0decfec 100644
--- a/spec/models/project_services/microsoft_teams_service_spec.rb
+++ b/spec/models/project_services/microsoft_teams_service_spec.rb
@@ -108,12 +108,8 @@ describe MicrosoftTeamsService do
message: "user created page: Awesome wiki_page"
}
end
-
- let(:wiki_page_sample_data) do
- service = WikiPages::CreateService.new(project, user, opts)
- wiki_page = service.execute
- Gitlab::DataBuilder::WikiPage.build(wiki_page, user, 'create')
- end
+ let(:wiki_page) { create(:wiki_page, wiki: project.wiki, attrs: opts) }
+ let(:wiki_page_sample_data) { Gitlab::DataBuilder::WikiPage.build(wiki_page, user, 'create') }
it "calls Microsoft Teams API" do
chat_service.execute(wiki_page_sample_data)
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index cf26dbfea49..74eba7e33f6 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -1761,6 +1761,21 @@ describe Project do
it { expect(project.gitea_import?).to be true }
end
+ describe '#ancestors_upto', :nested_groups do
+ let(:parent) { create(:group) }
+ let(:child) { create(:group, parent: parent) }
+ let(:child2) { create(:group, parent: child) }
+ let(:project) { create(:project, namespace: child2) }
+
+ it 'returns all ancestors when no namespace is given' do
+ expect(project.ancestors_upto).to contain_exactly(child2, child, parent)
+ end
+
+ it 'includes ancestors up to but excluding the given ancestor' do
+ expect(project.ancestors_upto(parent)).to contain_exactly(child2, child)
+ end
+ end
+
describe '#lfs_enabled?' do
let(:project) { create(:project) }
@@ -2178,6 +2193,12 @@ describe Project do
it { expect(project.parent).to eq(project.namespace) }
end
+ describe '#parent_id' do
+ let(:project) { create(:project) }
+
+ it { expect(project.parent_id).to eq(project.namespace_id) }
+ end
+
describe '#parent_changed?' do
let(:project) { create(:project) }
diff --git a/spec/models/project_wiki_spec.rb b/spec/models/project_wiki_spec.rb
index 78fb2df884a..f10d9383ae2 100644
--- a/spec/models/project_wiki_spec.rb
+++ b/spec/models/project_wiki_spec.rb
@@ -180,37 +180,47 @@ describe ProjectWiki do
end
describe "#create_page" do
- after do
- destroy_page(subject.pages.first.page)
- end
+ shared_examples 'creating a wiki page' do
+ after do
+ destroy_page(subject.pages.first.page)
+ end
- it "creates a new wiki page" do
- expect(subject.create_page("test page", "this is content")).not_to eq(false)
- expect(subject.pages.count).to eq(1)
- end
+ it "creates a new wiki page" do
+ expect(subject.create_page("test page", "this is content")).not_to eq(false)
+ expect(subject.pages.count).to eq(1)
+ end
- it "returns false when a duplicate page exists" do
- subject.create_page("test page", "content")
- expect(subject.create_page("test page", "content")).to eq(false)
- end
+ it "returns false when a duplicate page exists" do
+ subject.create_page("test page", "content")
+ expect(subject.create_page("test page", "content")).to eq(false)
+ end
- it "stores an error message when a duplicate page exists" do
- 2.times { subject.create_page("test page", "content") }
- expect(subject.error_message).to match(/Duplicate page:/)
- end
+ it "stores an error message when a duplicate page exists" do
+ 2.times { subject.create_page("test page", "content") }
+ expect(subject.error_message).to match(/Duplicate page:/)
+ end
- it "sets the correct commit message" do
- subject.create_page("test page", "some content", :markdown, "commit message")
- expect(subject.pages.first.page.version.message).to eq("commit message")
- end
+ it "sets the correct commit message" do
+ subject.create_page("test page", "some content", :markdown, "commit message")
+ expect(subject.pages.first.page.version.message).to eq("commit message")
+ end
- it 'updates project activity' do
- subject.create_page('Test Page', 'This is content')
+ it 'updates project activity' do
+ subject.create_page('Test Page', 'This is content')
- project.reload
+ project.reload
- expect(project.last_activity_at).to be_within(1.minute).of(Time.now)
- expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.now)
+ expect(project.last_activity_at).to be_within(1.minute).of(Time.now)
+ expect(project.last_repository_updated_at).to be_within(1.minute).of(Time.now)
+ end
+ end
+
+ context 'when Gitaly wiki_write_page is enabled' do
+ it_behaves_like 'creating a wiki page'
+ end
+
+ context 'when Gitaly wiki_write_page is disabled', :skip_gitaly_mock do
+ it_behaves_like 'creating a wiki page'
end
end
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index f44693a71bb..39d188f18af 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -1286,21 +1286,31 @@ describe Repository do
let(:message) { 'Test \r\n\r\n message' }
- it 'merges the code and returns the commit id' do
- expect(merge_commit).to be_present
- expect(repository.blob_at(merge_commit.id, 'files/ruby/feature.rb')).to be_present
- end
+ shared_examples '#merge' do
+ it 'merges the code and returns the commit id' do
+ expect(merge_commit).to be_present
+ expect(repository.blob_at(merge_commit.id, 'files/ruby/feature.rb')).to be_present
+ end
- it 'sets the `in_progress_merge_commit_sha` flag for the given merge request' do
- merge_commit_id = merge(repository, user, merge_request, message)
+ it 'sets the `in_progress_merge_commit_sha` flag for the given merge request' do
+ merge_commit_id = merge(repository, user, merge_request, message)
- expect(merge_request.in_progress_merge_commit_sha).to eq(merge_commit_id)
+ expect(merge_request.in_progress_merge_commit_sha).to eq(merge_commit_id)
+ end
+
+ it 'removes carriage returns from commit message' do
+ merge_commit_id = merge(repository, user, merge_request, message)
+
+ expect(repository.commit(merge_commit_id).message).to eq(message.delete("\r"))
+ end
end
- it 'removes carriage returns from commit message' do
- merge_commit_id = merge(repository, user, merge_request, message)
+ context 'with gitaly' do
+ it_behaves_like '#merge'
+ end
- expect(repository.commit(merge_commit_id).message).to eq(message.delete("\r"))
+ context 'without gitaly', :skip_gitaly_mock do
+ it_behaves_like '#merge'
end
def merge(repository, user, merge_request, message)
diff --git a/spec/requests/api/branches_spec.rb b/spec/requests/api/branches_spec.rb
index 16b12446ed4..e433597f58b 100644
--- a/spec/requests/api/branches_spec.rb
+++ b/spec/requests/api/branches_spec.rb
@@ -110,6 +110,15 @@ describe API::Branches do
end
end
+ context 'when the branch refname is invalid' do
+ let(:branch_name) { 'branch*' }
+ let(:message) { 'The branch refname is invalid' }
+
+ it_behaves_like '400 response' do
+ let(:request) { get api(route, current_user) }
+ end
+ end
+
context 'when repository is disabled' do
include_context 'disabled repository'
@@ -234,6 +243,15 @@ describe API::Branches do
end
end
+ context 'when the branch refname is invalid' do
+ let(:branch_name) { 'branch*' }
+ let(:message) { 'The branch refname is invalid' }
+
+ it_behaves_like '400 response' do
+ let(:request) { put api(route, current_user) }
+ end
+ end
+
context 'when repository is disabled' do
include_context 'disabled repository'
@@ -359,6 +377,15 @@ describe API::Branches do
end
end
+ context 'when the branch refname is invalid' do
+ let(:branch_name) { 'branch*' }
+ let(:message) { 'The branch refname is invalid' }
+
+ it_behaves_like '400 response' do
+ let(:request) { put api(route, current_user) }
+ end
+ end
+
context 'when repository is disabled' do
include_context 'disabled repository'
@@ -520,6 +547,15 @@ describe API::Branches do
expect(response).to have_gitlab_http_status(404)
end
+ context 'when the branch refname is invalid' do
+ let(:branch_name) { 'branch*' }
+ let(:message) { 'The branch refname is invalid' }
+
+ it_behaves_like '400 response' do
+ let(:request) { delete api("/projects/#{project.id}/repository/branches/#{branch_name}", user) }
+ end
+ end
+
it_behaves_like '412 response' do
let(:request) { api("/projects/#{project.id}/repository/branches/#{branch_name}", user) }
end
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 0b9a4b5c3db..c24de58ee9d 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -23,6 +23,7 @@ describe API::Settings, 'Settings' do
expect(json_response['dsa_key_restriction']).to eq(0)
expect(json_response['ecdsa_key_restriction']).to eq(0)
expect(json_response['ed25519_key_restriction']).to eq(0)
+ expect(json_response['circuitbreaker_failure_count_threshold']).not_to be_nil
end
end
@@ -52,7 +53,8 @@ describe API::Settings, 'Settings' do
rsa_key_restriction: ApplicationSetting::FORBIDDEN_KEY_VALUE,
dsa_key_restriction: 2048,
ecdsa_key_restriction: 384,
- ed25519_key_restriction: 256
+ ed25519_key_restriction: 256,
+ circuitbreaker_failure_wait_time: 2
expect(response).to have_http_status(200)
expect(json_response['default_projects_limit']).to eq(3)
@@ -73,6 +75,7 @@ describe API::Settings, 'Settings' do
expect(json_response['dsa_key_restriction']).to eq(2048)
expect(json_response['ecdsa_key_restriction']).to eq(384)
expect(json_response['ed25519_key_restriction']).to eq(256)
+ expect(json_response['circuitbreaker_failure_wait_time']).to eq(2)
end
end
diff --git a/spec/requests/api/v3/repositories_spec.rb b/spec/requests/api/v3/repositories_spec.rb
index 1a55e2a71cd..67624a0bbea 100644
--- a/spec/requests/api/v3/repositories_spec.rb
+++ b/spec/requests/api/v3/repositories_spec.rb
@@ -97,10 +97,11 @@ describe API::V3::Repositories do
end
end
- {
- 'blobs/:sha' => 'blobs/master',
- 'commits/:sha/blob' => 'commits/master/blob'
- }.each do |desc_path, example_path|
+ [
+ ['blobs/:sha', 'blobs/master'],
+ ['blobs/:sha', 'blobs/v1.1.0'],
+ ['commits/:sha/blob', 'commits/master/blob']
+ ].each do |desc_path, example_path|
describe "GET /projects/:id/repository/#{desc_path}" do
let(:route) { "/projects/#{project.id}/repository/#{example_path}?filepath=README.md" }
shared_examples_for 'repository blob' do
@@ -110,7 +111,7 @@ describe API::V3::Repositories do
end
context 'when sha does not exist' do
it_behaves_like '404 response' do
- let(:request) { get v3_api(route.sub('master', 'invalid_branch_name'), current_user) }
+ let(:request) { get v3_api("/projects/#{project.id}/repository/#{desc_path.sub(':sha', 'invalid_branch_name')}?filepath=README.md", current_user) }
let(:message) { '404 Commit Not Found' }
end
end
diff --git a/spec/rubocop/cop/rspec/env_assignment_spec.rb b/spec/rubocop/cop/rspec/env_assignment_spec.rb
new file mode 100644
index 00000000000..4e859b6f6fa
--- /dev/null
+++ b/spec/rubocop/cop/rspec/env_assignment_spec.rb
@@ -0,0 +1,59 @@
+require 'spec_helper'
+
+require 'rubocop'
+require 'rubocop/rspec/support'
+
+require_relative '../../../../rubocop/cop/rspec/env_assignment'
+
+describe RuboCop::Cop::RSpec::EnvAssignment do
+ include CopHelper
+
+ OFFENSE_CALL_SINGLE_QUOTES_KEY = %(ENV['FOO'] = 'bar').freeze
+ OFFENSE_CALL_DOUBLE_QUOTES_KEY = %(ENV["FOO"] = 'bar').freeze
+
+ let(:source_file) { 'spec/foo_spec.rb' }
+
+ subject(:cop) { described_class.new }
+
+ shared_examples 'an offensive ENV#[]= call' do |content|
+ it "registers an offense for `#{content}`" do
+ inspect_source(cop, content, source_file)
+
+ expect(cop.offenses.size).to eq(1)
+ expect(cop.offenses.map(&:line)).to eq([1])
+ expect(cop.highlights).to eq([content])
+ end
+ end
+
+ shared_examples 'an autocorrected ENV#[]= call' do |content, autocorrected_content|
+ it "registers an offense for `#{content}` and autocorrects it to `#{autocorrected_content}`" do
+ autocorrected = autocorrect_source(cop, content, source_file)
+
+ expect(autocorrected).to eql(autocorrected_content)
+ end
+ end
+
+ context 'in a spec file' do
+ before do
+ allow(cop).to receive(:in_spec?).and_return(true)
+ end
+
+ context 'with a key using single quotes' do
+ it_behaves_like 'an offensive ENV#[]= call', OFFENSE_CALL_SINGLE_QUOTES_KEY
+ it_behaves_like 'an autocorrected ENV#[]= call', OFFENSE_CALL_SINGLE_QUOTES_KEY, %(stub_env('FOO', 'bar'))
+ end
+
+ context 'with a key using double quotes' do
+ it_behaves_like 'an offensive ENV#[]= call', OFFENSE_CALL_DOUBLE_QUOTES_KEY
+ it_behaves_like 'an autocorrected ENV#[]= call', OFFENSE_CALL_DOUBLE_QUOTES_KEY, %(stub_env("FOO", 'bar'))
+ end
+ end
+
+ context 'outside of a spec file' do
+ it "does not register an offense for `#{OFFENSE_CALL_SINGLE_QUOTES_KEY}` in a non-spec file" do
+ inspect_source(cop, OFFENSE_CALL_SINGLE_QUOTES_KEY)
+
+ expect(cop.offenses.size).to eq(0)
+ end
+ end
+end
diff --git a/spec/serializers/container_tag_entity_spec.rb b/spec/serializers/container_tag_entity_spec.rb
index 6dcc5204516..4beb50c70f8 100644
--- a/spec/serializers/container_tag_entity_spec.rb
+++ b/spec/serializers/container_tag_entity_spec.rb
@@ -22,7 +22,7 @@ describe ContainerTagEntity do
end
it 'exposes required informations' do
- expect(subject).to include(:name, :location, :revision, :total_size, :created_at)
+ expect(subject).to include(:name, :location, :revision, :short_revision, :total_size, :created_at)
end
context 'when user can manage repositories' do
diff --git a/spec/serializers/group_child_entity_spec.rb b/spec/serializers/group_child_entity_spec.rb
new file mode 100644
index 00000000000..452754d7a79
--- /dev/null
+++ b/spec/serializers/group_child_entity_spec.rb
@@ -0,0 +1,101 @@
+require 'spec_helper'
+
+describe GroupChildEntity do
+ include Gitlab::Routing.url_helpers
+
+ let(:user) { create(:user) }
+ let(:request) { double('request') }
+ let(:entity) { described_class.new(object, request: request) }
+ subject(:json) { entity.as_json }
+
+ before do
+ allow(request).to receive(:current_user).and_return(user)
+ end
+
+ shared_examples 'group child json' do
+ it 'renders json' do
+ is_expected.not_to be_nil
+ end
+
+ %w[id
+ full_name
+ avatar_url
+ name
+ description
+ visibility
+ type
+ can_edit
+ visibility
+ permission
+ relative_path].each do |attribute|
+ it "includes #{attribute}" do
+ expect(json[attribute.to_sym]).to be_present
+ end
+ end
+ end
+
+ describe 'for a project' do
+ let(:object) do
+ create(:project, :with_avatar,
+ description: 'Awesomeness')
+ end
+
+ before do
+ object.add_master(user)
+ end
+
+ it 'has the correct type' do
+ expect(json[:type]).to eq('project')
+ end
+
+ it 'includes the star count' do
+ expect(json[:star_count]).to be_present
+ end
+
+ it 'has the correct edit path' do
+ expect(json[:edit_path]).to eq(edit_project_path(object))
+ end
+
+ it_behaves_like 'group child json'
+ end
+
+ describe 'for a group', :nested_groups do
+ let(:object) do
+ create(:group, :nested, :with_avatar,
+ description: 'Awesomeness')
+ end
+
+ before do
+ object.add_owner(user)
+ end
+
+ it 'has the correct type' do
+ expect(json[:type]).to eq('group')
+ end
+
+ it 'counts projects and subgroups as children' do
+ create(:project, namespace: object)
+ create(:group, parent: object)
+
+ expect(json[:children_count]).to eq(2)
+ end
+
+ %w[children_count leave_path parent_id number_projects_with_delimiter number_users_with_delimiter project_count subgroup_count].each do |attribute|
+ it "includes #{attribute}" do
+ expect(json[attribute.to_sym]).to be_present
+ end
+ end
+
+ it 'allows an owner to leave when there is another one' do
+ object.add_owner(create(:user))
+
+ expect(json[:can_leave]).to be_truthy
+ end
+
+ it 'has the correct edit path' do
+ expect(json[:edit_path]).to eq(edit_group_path(object))
+ end
+
+ it_behaves_like 'group child json'
+ end
+end
diff --git a/spec/serializers/group_child_serializer_spec.rb b/spec/serializers/group_child_serializer_spec.rb
new file mode 100644
index 00000000000..5541ada3750
--- /dev/null
+++ b/spec/serializers/group_child_serializer_spec.rb
@@ -0,0 +1,110 @@
+require 'spec_helper'
+
+describe GroupChildSerializer do
+ let(:request) { double('request') }
+ let(:user) { create(:user) }
+ subject(:serializer) { described_class.new(current_user: user) }
+
+ describe '#represent' do
+ context 'for groups' do
+ it 'can render a single group' do
+ expect(serializer.represent(build(:group))).to be_kind_of(Hash)
+ end
+
+ it 'can render a collection of groups' do
+ expect(serializer.represent(build_list(:group, 2))).to be_kind_of(Array)
+ end
+ end
+
+ context 'with a hierarchy', :nested_groups do
+ let(:parent) { create(:group) }
+
+ subject(:serializer) do
+ described_class.new(current_user: user).expand_hierarchy(parent)
+ end
+
+ it 'expands the subgroups' do
+ subgroup = create(:group, parent: parent)
+ subsub_group = create(:group, parent: subgroup)
+
+ json = serializer.represent([subgroup, subsub_group]).first
+ subsub_group_json = json[:children].first
+
+ expect(json[:id]).to eq(subgroup.id)
+ expect(subsub_group_json).not_to be_nil
+ expect(subsub_group_json[:id]).to eq(subsub_group.id)
+ end
+
+ it 'can render a nested tree' do
+ subgroup1 = create(:group, parent: parent)
+ subsub_group1 = create(:group, parent: subgroup1)
+ subgroup2 = create(:group, parent: parent)
+
+ json = serializer.represent([subgroup1, subsub_group1, subgroup1, subgroup2])
+ subgroup1_json = json.first
+ subsub_group1_json = subgroup1_json[:children].first
+
+ expect(json.size).to eq(2)
+ expect(subgroup1_json[:id]).to eq(subgroup1.id)
+ expect(subsub_group1_json[:id]).to eq(subsub_group1.id)
+ end
+
+ context 'without a specified parent' do
+ subject(:serializer) do
+ described_class.new(current_user: user).expand_hierarchy
+ end
+
+ it 'can render a tree' do
+ subgroup = create(:group, parent: parent)
+
+ json = serializer.represent([parent, subgroup])
+ parent_json = json.first
+
+ expect(parent_json[:id]).to eq(parent.id)
+ expect(parent_json[:children].first[:id]).to eq(subgroup.id)
+ end
+ end
+ end
+
+ context 'for projects' do
+ it 'can render a single project' do
+ expect(serializer.represent(build(:project))).to be_kind_of(Hash)
+ end
+
+ it 'can render a collection of projects' do
+ expect(serializer.represent(build_list(:project, 2))).to be_kind_of(Array)
+ end
+
+ context 'with a hierarchy', :nested_groups do
+ let(:parent) { create(:group) }
+
+ subject(:serializer) do
+ described_class.new(current_user: user).expand_hierarchy(parent)
+ end
+
+ it 'can render a nested tree' do
+ subgroup1 = create(:group, parent: parent)
+ project1 = create(:project, namespace: subgroup1)
+ subgroup2 = create(:group, parent: parent)
+ project2 = create(:project, namespace: subgroup2)
+
+ json = serializer.represent([project1, project2, subgroup1, subgroup2])
+ project1_json, project2_json = json.map { |group_json| group_json[:children].first }
+
+ expect(json.size).to eq(2)
+ expect(project1_json[:id]).to eq(project1.id)
+ expect(project2_json[:id]).to eq(project2.id)
+ end
+
+ it 'returns an array when given an array with a single element' do
+ project = create(:project, namespace: parent)
+
+ json = serializer.represent([project])
+
+ expect(json).to be_kind_of(Array)
+ expect(json.size).to eq(1)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb
index 1c2d0b3e0dc..9128280eb5a 100644
--- a/spec/services/auth/container_registry_authentication_service_spec.rb
+++ b/spec/services/auth/container_registry_authentication_service_spec.rb
@@ -43,6 +43,21 @@ describe Auth::ContainerRegistryAuthenticationService do
end
end
+ shared_examples 'a browsable' do
+ let(:access) do
+ [{ 'type' => 'registry',
+ 'name' => 'catalog',
+ 'actions' => ['*'] }]
+ end
+
+ it_behaves_like 'a valid token'
+ it_behaves_like 'not a container repository factory'
+
+ it 'has the correct scope' do
+ expect(payload).to include('access' => access)
+ end
+ end
+
shared_examples 'an accessible' do
let(:access) do
[{ 'type' => 'repository',
@@ -51,7 +66,10 @@ describe Auth::ContainerRegistryAuthenticationService do
end
it_behaves_like 'a valid token'
- it { expect(payload).to include('access' => access) }
+
+ it 'has the correct scope' do
+ expect(payload).to include('access' => access)
+ end
end
shared_examples 'an inaccessible' do
@@ -117,6 +135,17 @@ describe Auth::ContainerRegistryAuthenticationService do
context 'user authorization' do
let(:current_user) { create(:user) }
+ context 'for registry catalog' do
+ let(:current_params) do
+ { scope: "registry:catalog:*" }
+ end
+
+ context 'disallows browsing for users without GitLab admin rights' do
+ it_behaves_like 'an inaccessible'
+ it_behaves_like 'not a container repository factory'
+ end
+ end
+
context 'for private project' do
let(:project) { create(:project) }
@@ -490,6 +519,16 @@ describe Auth::ContainerRegistryAuthenticationService do
end
end
+ context 'registry catalog browsing authorized as admin' do
+ let(:current_user) { create(:user, :admin) }
+
+ let(:current_params) do
+ { scope: "registry:catalog:*" }
+ end
+
+ it_behaves_like 'a browsable'
+ end
+
context 'unauthorized' do
context 'disallow to use scope-less authentication' do
it_behaves_like 'a forbidden'
@@ -536,5 +575,14 @@ describe Auth::ContainerRegistryAuthenticationService do
it_behaves_like 'not a container repository factory'
end
end
+
+ context 'for registry catalog' do
+ let(:current_params) do
+ { scope: "registry:catalog:*" }
+ end
+
+ it_behaves_like 'a forbidden'
+ it_behaves_like 'not a container repository factory'
+ end
end
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 9db3568abee..b61d1cb765e 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -160,8 +160,9 @@ describe Ci::RetryBuildService do
expect(new_build).to be_created
end
- it 'does mark old build as retried' do
+ it 'marks the old build as retried in the database and on the instance' do
expect(new_build).to be_latest
+ expect(build).to be_retried
expect(build.reload).to be_retried
end
end
diff --git a/spec/services/merge_requests/conflicts/list_service_spec.rb b/spec/services/merge_requests/conflicts/list_service_spec.rb
index 23982b9e6e1..0b32c51a16f 100644
--- a/spec/services/merge_requests/conflicts/list_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/list_service_spec.rb
@@ -35,7 +35,7 @@ describe MergeRequests::Conflicts::ListService do
it 'returns a falsey value when the MR has a missing ref after a force push' do
merge_request = create_merge_request('conflict-resolvable')
service = conflicts_service(merge_request)
- allow(service.conflicts).to receive(:merge_index).and_raise(Rugged::OdbError)
+ allow_any_instance_of(Rugged::Repository).to receive(:merge_commits).and_raise(Rugged::OdbError)
expect(service.can_be_resolved_in_ui?).to be_falsey
end
diff --git a/spec/services/merge_requests/conflicts/resolve_service_spec.rb b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
index a1f7dc44d31..5376083e7f5 100644
--- a/spec/services/merge_requests/conflicts/resolve_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
@@ -107,25 +107,27 @@ describe MergeRequests::Conflicts::ResolveService do
branch_name: 'conflict-start')
end
- def resolve_conflicts
+ subject do
described_class.new(merge_request_from_fork).execute(user, params)
end
it 'gets conflicts from the source project' do
+ # REFACTOR NOTE: We used to test that `project.repository.rugged` wasn't
+ # used in this case, but since the refactor, for simplicity,
+ # we always use that repository for read-only operations.
expect(forked_project.repository.rugged).to receive(:merge_commits).and_call_original
- expect(project.repository.rugged).not_to receive(:merge_commits)
- resolve_conflicts
+ subject
end
it 'creates a commit with the message' do
- resolve_conflicts
+ subject
expect(merge_request_from_fork.source_branch_head.message).to eq(params[:commit_message])
end
it 'creates a commit with the correct parents' do
- resolve_conflicts
+ subject
expect(merge_request_from_fork.source_branch_head.parents.map(&:id))
.to eq(['404fa3fc7c2c9b5dacff102f353bdf55b1be2813', target_head])
@@ -200,14 +202,19 @@ describe MergeRequests::Conflicts::ResolveService do
}
end
- it 'raises a MissingResolution error' do
+ it 'raises a ResolutionError' do
expect { service.execute(user, invalid_params) }
- .to raise_error(Gitlab::Conflict::File::MissingResolution)
+ .to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
end
end
context 'when the content of a file is unchanged' do
- let(:list_service) { MergeRequests::Conflicts::ListService.new(merge_request) }
+ let(:resolver) do
+ MergeRequests::Conflicts::ListService.new(merge_request).conflicts.resolver
+ end
+ let(:regex_conflict) do
+ resolver.conflict_for_path('files/ruby/regex.rb', 'files/ruby/regex.rb')
+ end
let(:invalid_params) do
{
@@ -219,16 +226,16 @@ describe MergeRequests::Conflicts::ResolveService do
}, {
old_path: 'files/ruby/regex.rb',
new_path: 'files/ruby/regex.rb',
- content: list_service.conflicts.file_for_path('files/ruby/regex.rb', 'files/ruby/regex.rb').content
+ content: regex_conflict.content
}
],
commit_message: 'This is a commit message!'
}
end
- it 'raises a MissingResolution error' do
+ it 'raises a ResolutionError' do
expect { service.execute(user, invalid_params) }
- .to raise_error(Gitlab::Conflict::File::MissingResolution)
+ .to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
end
end
@@ -246,9 +253,9 @@ describe MergeRequests::Conflicts::ResolveService do
}
end
- it 'raises a MissingFiles error' do
+ it 'raises a ResolutionError' do
expect { service.execute(user, invalid_params) }
- .to raise_error(described_class::MissingFiles)
+ .to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
end
end
end
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index c90bad46295..0bec2054f50 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -1,6 +1,8 @@
require 'spec_helper'
describe Projects::DestroyService do
+ include ProjectForksHelper
+
let!(:user) { create(:user) }
let!(:project) { create(:project, :repository, namespace: user.namespace) }
let!(:path) { project.repository.path_to_repo }
@@ -212,6 +214,21 @@ describe Projects::DestroyService do
end
end
+ context 'for a forked project with LFS objects' do
+ let(:forked_project) { fork_project(project, user) }
+
+ before do
+ project.lfs_objects << create(:lfs_object)
+ forked_project.forked_project_link.destroy
+ forked_project.reload
+ end
+
+ it 'destroys the fork' do
+ expect { destroy_project(forked_project, user) }
+ .not_to raise_error
+ end
+ end
+
context 'as the root of a fork network' do
let!(:fork_network) { create(:fork_network, root_project: project) }
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index 031366d1825..d4ac1f6ad81 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -52,6 +52,11 @@ describe Projects::UpdatePagesService do
expect(project.pages_deployed?).to be_falsey
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
+
+ # Check that all expected files are extracted
+ %w[index.html zero .hidden/file].each do |filename|
+ expect(File.exist?(File.join(project.public_pages_path, filename))).to be_truthy
+ end
end
it 'limits pages size' do
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index 6926ac85de3..c35177f6ebc 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -207,7 +207,11 @@ describe QuickActions::InterpretService do
it 'populates spend_time: 3600 if content contains /spend 1h' do
_, updates = service.execute(content, issuable)
- expect(updates).to eq(spend_time: { duration: 3600, user: developer })
+ expect(updates).to eq(spend_time: {
+ duration: 3600,
+ user: developer,
+ spent_at: DateTime.now.to_date
+ })
end
end
@@ -215,7 +219,39 @@ describe QuickActions::InterpretService do
it 'populates spend_time: -1800 if content contains /spend -30m' do
_, updates = service.execute(content, issuable)
- expect(updates).to eq(spend_time: { duration: -1800, user: developer })
+ expect(updates).to eq(spend_time: {
+ duration: -1800,
+ user: developer,
+ spent_at: DateTime.now.to_date
+ })
+ end
+ end
+
+ shared_examples 'spend command with valid date' do
+ it 'populates spend_time: 1800 with the date in Date format' do
+ _, updates = service.execute(content, issuable)
+
+ expect(updates).to eq(spend_time: {
+ duration: 1800,
+ user: developer,
+ spent_at: Date.parse(date)
+ })
+ end
+ end
+
+ shared_examples 'spend command with invalid date' do
+ it 'does not create any note or timelog' do
+ _, updates = service.execute(content, issuable)
+
+ expect(updates).to eq({})
+ end
+ end
+
+ shared_examples 'spend command with future date' do
+ it 'does not create any note or timelog' do
+ _, updates = service.execute(content, issuable)
+
+ expect(updates).to eq({})
end
end
@@ -669,6 +705,22 @@ describe QuickActions::InterpretService do
let(:issuable) { issue }
end
+ it_behaves_like 'spend command with valid date' do
+ let(:date) { '2016-02-02' }
+ let(:content) { "/spend 30m #{date}" }
+ let(:issuable) { issue }
+ end
+
+ it_behaves_like 'spend command with invalid date' do
+ let(:content) { '/spend 30m 17-99-99' }
+ let(:issuable) { issue }
+ end
+
+ it_behaves_like 'spend command with future date' do
+ let(:content) { '/spend 30m 6017-10-10' }
+ let(:issuable) { issue }
+ end
+
it_behaves_like 'empty command' do
let(:content) { '/spend' }
let(:issuable) { issue }
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index cd473c1f388..0a6ab455abe 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -502,20 +502,6 @@ describe SystemNoteService do
end
end
- describe '.cross_reference?' do
- it 'is truthy when text begins with expected text' do
- expect(described_class.cross_reference?('mentioned in something')).to be_truthy
- end
-
- it 'is truthy when text begins with legacy capitalized expected text' do
- expect(described_class.cross_reference?('mentioned in something')).to be_truthy
- end
-
- it 'is falsey when text does not begin with expected text' do
- expect(described_class.cross_reference?('this is a note')).to be_falsey
- end
- end
-
describe '.cross_reference_disallowed?' do
context 'when mentioner is not a MergeRequest' do
it 'is falsey' do
diff --git a/spec/support/board_helpers.rb b/spec/support/board_helpers.rb
new file mode 100644
index 00000000000..507d0432d7f
--- /dev/null
+++ b/spec/support/board_helpers.rb
@@ -0,0 +1,16 @@
+module BoardHelpers
+ def click_card(card)
+ within card do
+ first('.card-number').click
+ end
+
+ wait_for_sidebar
+ end
+
+ def wait_for_sidebar
+ # loop until the CSS transition is complete
+ Timeout.timeout(0.5) do
+ loop until evaluate_script('$(".right-sidebar").outerWidth()') == 290
+ end
+ end
+end
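A minimal usage sketch for the new BoardHelpers module, assuming a Capybara :js feature spec with a rendered issue board; the spec description and the `.card` selector are illustrative, not taken from the suite:

    # hypothetical feature spec using the helper above
    require 'spec_helper'

    describe 'Issue board sidebar', :js do
      include BoardHelpers

      it 'opens the sidebar after clicking a card' do
        # click_card clicks the card number, then wait_for_sidebar polls
        # until the right sidebar's CSS transition settles at 290px wide
        click_card(first('.card'))

        expect(page).to have_selector('.right-sidebar')
      end
    end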
diff --git a/spec/support/features/discussion_comments_shared_example.rb b/spec/support/features/discussion_comments_shared_example.rb
index 4abb847915d..aabc64d972b 100644
--- a/spec/support/features/discussion_comments_shared_example.rb
+++ b/spec/support/features/discussion_comments_shared_example.rb
@@ -128,14 +128,31 @@ shared_examples 'discussion comments' do |resource_name|
end
end
- it 'clicking "Start discussion" will post a discussion' do
- find(submit_selector).click
+ describe 'creating a discussion' do
+ before do
+ find(submit_selector).click
+ find(comments_selector, match: :first)
+ end
+
+ it 'clicking "Start discussion" will post a discussion' do
+ new_comment = all(comments_selector).last
+
+ expect(new_comment).to have_content 'a'
+ expect(new_comment).to have_selector '.discussion'
+ end
+
+ if resource_name == 'merge request'
+ it 'shows resolved discussion when toggled' do
+ click_button "Resolve discussion"
+
+ expect(page).to have_selector('.note-row-1', visible: true)
- find(comments_selector, match: :first)
- new_comment = all(comments_selector).last
+ refresh
+ click_button "Toggle discussion"
- expect(new_comment).to have_content 'a'
- expect(new_comment).to have_selector '.discussion'
+ expect(page).to have_selector('.note-row-1', visible: true)
+ end
+ end
end
if resource_name == 'issue'
diff --git a/spec/support/ldap_helpers.rb b/spec/support/ldap_helpers.rb
index 079f244475c..28d39a32f02 100644
--- a/spec/support/ldap_helpers.rb
+++ b/spec/support/ldap_helpers.rb
@@ -15,10 +15,7 @@ module LdapHelpers
# admin_group: 'my-admin-group'
# )
def stub_ldap_config(messages)
- messages.each do |config, value|
- allow_any_instance_of(::Gitlab::LDAP::Config)
- .to receive(config.to_sym).and_return(value)
- end
+ allow_any_instance_of(::Gitlab::LDAP::Config).to receive_messages(messages)
end
# Stub an LDAP person search and provide the return entry. Specify `nil` for
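With stub_ldap_config now delegating to RSpec's receive_messages, a call site reads as a single hash of attributes; the values below are illustrative:

    # inside a spec that includes LdapHelpers
    stub_ldap_config(
      enabled: true,
      active_directory: false,
      admin_group: 'my-admin-group'
    )
    # each key is stubbed as a method on any Gitlab::LDAP::Config
    # instance, returning the given value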
diff --git a/spec/support/login_helpers.rb b/spec/support/login_helpers.rb
index 3e117530151..4aed40bf22d 100644
--- a/spec/support/login_helpers.rb
+++ b/spec/support/login_helpers.rb
@@ -120,4 +120,16 @@ module LoginHelpers
allow_any_instance_of(Object).to receive(:user_saml_omniauth_authorize_path).and_return('/users/auth/saml')
allow_any_instance_of(Object).to receive(:omniauth_authorize_path).with(:user, "saml").and_return('/users/auth/saml')
end
+
+ def stub_omniauth_config(messages)
+ allow(Gitlab.config.omniauth).to receive_messages(messages)
+ end
+
+ def stub_basic_saml_config
+ allow(Gitlab::Saml::Config).to receive_messages({ options: { name: 'saml', args: {} } })
+ end
+
+ def stub_saml_group_config(groups)
+ allow(Gitlab::Saml::Config).to receive_messages({ options: { name: 'saml', groups_attribute: 'groups', external_groups: groups, args: {} } })
+ end
end
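A sketch of how the new LoginHelpers stubs combine in a SAML feature spec; the omniauth keys and group names are illustrative:

    # inside a spec that includes LoginHelpers
    stub_omniauth_config(enabled: true, auto_link_saml_user: true)

    # stub the SAML provider with external group mappings
    stub_saml_group_config(%w[Freelancers Interns])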
diff --git a/spec/support/redis_without_keys.rb b/spec/support/redis_without_keys.rb
new file mode 100644
index 00000000000..6220167dee6
--- /dev/null
+++ b/spec/support/redis_without_keys.rb
@@ -0,0 +1,8 @@
+class Redis
+ ForbiddenCommand = Class.new(StandardError)
+
+ def keys(*args)
+ raise ForbiddenCommand.new("Don't use `Redis#keys` as it iterates over all "\
+ "keys in redis. Use `Redis#scan_each` instead.")
+ end
+end
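For reference, the scan-based replacement the error message points to, written against the plain redis-rb API; the match pattern is illustrative:

    require 'redis'

    redis = Redis.new

    # SCAN walks the keyspace incrementally instead of blocking the
    # server the way KEYS does
    stale = []
    redis.scan_each(match: 'session:*', count: 1000) { |key| stale << key }
    redis.del(*stale) unless stale.empty?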
diff --git a/spec/support/select2_helper.rb b/spec/support/select2_helper.rb
index 515341bc565..0ba44b1b115 100644
--- a/spec/support/select2_helper.rb
+++ b/spec/support/select2_helper.rb
@@ -21,6 +21,7 @@ module Select2Helper
selector = options.fetch(:from)
+ first(selector, visible: false)
if options[:multiple]
execute_script("$('#{selector}').select2('val', ['#{value}']).trigger('change');")
else
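For context, the helper is typically called like this from a :js feature spec that includes Select2Helper; the selectors and values are illustrative:

    select2(user.id, from: '#issuable_assignee_id')

    # multi-select variant
    select2(label.id, from: '#issuable_label_ids', multiple: true)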
diff --git a/spec/support/shared_examples/requests/api/status_shared_examples.rb b/spec/support/shared_examples/requests/api/status_shared_examples.rb
index 7d7f66adeab..0ed917e448a 100644
--- a/spec/support/shared_examples/requests/api/status_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/status_shared_examples.rb
@@ -3,6 +3,8 @@
# Requires an API request:
# let(:request) { get api("/projects/#{project.id}/repository/branches", user) }
shared_examples_for '400 response' do
+ let(:message) { nil }
+
before do
# Fires the request
request
@@ -10,6 +12,10 @@ shared_examples_for '400 response' do
it 'returns 400' do
expect(response).to have_gitlab_http_status(400)
+
+ if message.present?
+ expect(json_response['message']).to eq(message)
+ end
end
end
@@ -26,6 +32,7 @@ end
shared_examples_for '404 response' do
let(:message) { nil }
+
before do
# Fires the request
request
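A sketch of a request spec opting into the new optional message assertion of the '400 response' shared example; the endpoint and message text are illustrative:

    it_behaves_like '400 response' do
      let(:request) { post api("/projects/#{project.id}/variables", user) }
      let(:message) { 'key is missing' }
    end
    # omitting let(:message) keeps the old behaviour: the shared example
    # defaults it to nil and only asserts the 400 status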
diff --git a/spec/support/slack_mattermost_notifications_shared_examples.rb b/spec/support/slack_mattermost_notifications_shared_examples.rb
index 6accf16bea4..17f3a861ba8 100644
--- a/spec/support/slack_mattermost_notifications_shared_examples.rb
+++ b/spec/support/slack_mattermost_notifications_shared_examples.rb
@@ -76,8 +76,7 @@ RSpec.shared_examples 'slack or mattermost notifications' do
message: "user created page: Awesome wiki_page"
}
- wiki_page_service = WikiPages::CreateService.new(project, user, opts)
- @wiki_page = wiki_page_service.execute
+ @wiki_page = create(:wiki_page, wiki: project.wiki, attrs: opts)
@wiki_page_sample_data = Gitlab::DataBuilder::WikiPage.build(@wiki_page, user, 'create')
end
diff --git a/spec/support/stub_configuration.rb b/spec/support/stub_configuration.rb
index 2dfb4d4a07f..4d448a55978 100644
--- a/spec/support/stub_configuration.rb
+++ b/spec/support/stub_configuration.rb
@@ -43,10 +43,6 @@ module StubConfiguration
messages['default'] ||= Gitlab.config.repositories.storages.default
messages.each do |storage_name, storage_settings|
storage_settings['path'] = TestEnv.repos_path unless storage_settings.key?('path')
- storage_settings['failure_count_threshold'] ||= 10
- storage_settings['failure_wait_time'] ||= 30
- storage_settings['failure_reset_time'] ||= 1800
- storage_settings['storage_timeout'] ||= 5
end
allow(Gitlab.config.repositories).to receive(:storages).and_return(Settingslogic.new(messages))
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 886052d7848..bf2e11bc360 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -4,7 +4,15 @@ require 'rake'
describe 'gitlab:app namespace rake task' do
let(:enable_registry) { true }
- before :all do
+ def tars_glob
+ Dir.glob(File.join(Gitlab.config.backup.path, '*_gitlab_backup.tar'))
+ end
+
+ def backup_tar
+ tars_glob.first
+ end
+
+ before(:all) do
Rake.application.rake_require 'tasks/gitlab/helpers'
Rake.application.rake_require 'tasks/gitlab/backup'
Rake.application.rake_require 'tasks/gitlab/shell'
@@ -19,9 +27,16 @@ describe 'gitlab:app namespace rake task' do
end
before do
+ stub_env('force', 'yes')
+ FileUtils.rm(tars_glob, force: true)
+ reenable_backup_sub_tasks
stub_container_registry_config(enabled: enable_registry)
end
+ after do
+ FileUtils.rm(tars_glob, force: true)
+ end
+
def run_rake_task(task_name)
Rake::Task[task_name].reenable
Rake.application.invoke_task task_name
@@ -34,22 +49,15 @@ describe 'gitlab:app namespace rake task' do
end
describe 'backup_restore' do
- before do
- # avoid writing task output to spec progress
- allow($stdout).to receive :write
- end
-
context 'gitlab version' do
before do
allow(Dir).to receive(:glob).and_return(['1_gitlab_backup.tar'])
- allow(Dir).to receive(:chdir)
allow(File).to receive(:exist?).and_return(true)
allow(Kernel).to receive(:system).and_return(true)
allow(FileUtils).to receive(:cp_r).and_return(true)
allow(FileUtils).to receive(:mv).and_return(true)
allow(Rake::Task["gitlab:shell:setup"])
.to receive(:invoke).and_return(true)
- ENV['force'] = 'yes'
end
let(:gitlab_version) { Gitlab::VERSION }
@@ -58,8 +66,9 @@ describe 'gitlab:app namespace rake task' do
allow(YAML).to receive(:load_file)
.and_return({ gitlab_version: "not #{gitlab_version}" })
- expect { run_rake_task('gitlab:backup:restore') }
- .to raise_error(SystemExit)
+ expect do
+ expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout
+ end.to raise_error(SystemExit)
end
it 'invokes restoration on match' do
@@ -75,44 +84,15 @@ describe 'gitlab:app namespace rake task' do
expect(Rake::Task['gitlab:backup:lfs:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:backup:registry:restore']).to receive(:invoke)
expect(Rake::Task['gitlab:shell:setup']).to receive(:invoke)
- expect { run_rake_task('gitlab:backup:restore') }.not_to raise_error
+ expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout
end
end
end # backup_restore task
describe 'backup' do
- before(:all) do
- ENV['force'] = 'yes'
- end
-
- def tars_glob
- Dir.glob(File.join(Gitlab.config.backup.path, '*_gitlab_backup.tar'))
- end
-
- def create_backup
- FileUtils.rm tars_glob
-
+ before do
# This reconnect makes our project fixture disappear, breaking the restore. Stub it out.
allow(ActiveRecord::Base.connection).to receive(:reconnect!)
-
- # Redirect STDOUT and run the rake task
- orig_stdout = $stdout
- $stdout = StringIO.new
- reenable_backup_sub_tasks
- run_rake_task('gitlab:backup:create')
- reenable_backup_sub_tasks
- $stdout = orig_stdout
-
- @backup_tar = tars_glob.first
- end
-
- def restore_backup
- orig_stdout = $stdout
- $stdout = StringIO.new
- reenable_backup_sub_tasks
- run_rake_task('gitlab:backup:restore')
- reenable_backup_sub_tasks
- $stdout = orig_stdout
end
describe 'backup creation and deletion using custom_hooks' do
@@ -120,27 +100,17 @@ describe 'gitlab:app namespace rake task' do
let(:user_backup_path) { "repositories/#{project.disk_path}" }
before do
- @origin_cd = Dir.pwd
-
- path = File.join(project.repository.path_to_repo, filename)
+ stub_env('SKIP', 'db')
+ path = File.join(project.repository.path_to_repo, 'custom_hooks')
FileUtils.mkdir_p(path)
FileUtils.touch(File.join(path, "dummy.txt"))
-
- ENV["SKIP"] = "db"
- create_backup
- end
-
- after do
- ENV["SKIP"] = ""
- FileUtils.rm(@backup_tar)
- Dir.chdir(@origin_cd)
end
context 'project uses custom_hooks and successfully creates backup' do
- let(:filename) { "custom_hooks" }
-
it 'creates custom_hooks.tar and project bundle' do
- tar_contents, exit_status = Gitlab::Popen.popen(%W{tar -tvf #{@backup_tar}})
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
+ tar_contents, exit_status = Gitlab::Popen.popen(%W{tar -tvf #{backup_tar}})
expect(exit_status).to eq(0)
expect(tar_contents).to match(user_backup_path)
@@ -149,47 +119,43 @@ describe 'gitlab:app namespace rake task' do
end
it 'restores files correctly' do
- restore_backup
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+ expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout
- expect(Dir.entries(File.join(project.repository.path, "custom_hooks"))).to include("dummy.txt")
+ expect(Dir.entries(File.join(project.repository.path, 'custom_hooks'))).to include("dummy.txt")
end
end
end
context 'tar creation' do
- before do
- create_backup
- end
-
- after do
- FileUtils.rm(@backup_tar)
- end
-
context 'archive file permissions' do
it 'sets correct permissions on the tar file' do
- expect(File.exist?(@backup_tar)).to be_truthy
- expect(File::Stat.new(@backup_tar).mode.to_s(8)).to eq('100600')
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
+ expect(File.exist?(backup_tar)).to be_truthy
+ expect(File::Stat.new(backup_tar).mode.to_s(8)).to eq('100600')
end
context 'with custom archive_permissions' do
before do
allow(Gitlab.config.backup).to receive(:archive_permissions).and_return(0651)
- # We created a backup in a before(:all) so it got the default permissions.
- # We now need to do some work to create a _new_ backup file using our stub.
- FileUtils.rm(@backup_tar)
- create_backup
end
it 'uses the custom permissions' do
- expect(File::Stat.new(@backup_tar).mode.to_s(8)).to eq('100651')
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
+ expect(File::Stat.new(backup_tar).mode.to_s(8)).to eq('100651')
end
end
end
it 'sets correct permissions on the tar contents' do
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
tar_contents, exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{@backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz registry.tar.gz}
+ %W{tar -tvf #{backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz registry.tar.gz}
)
+
expect(exit_status).to eq(0)
expect(tar_contents).to match('db/')
expect(tar_contents).to match('uploads.tar.gz')
@@ -203,6 +169,8 @@ describe 'gitlab:app namespace rake task' do
end
it 'deletes temp directories' do
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
temp_dirs = Dir.glob(
File.join(Gitlab.config.backup.path, '{db,repositories,uploads,builds,artifacts,pages,lfs,registry}')
)
@@ -214,9 +182,12 @@ describe 'gitlab:app namespace rake task' do
let(:enable_registry) { false }
it 'does not create registry.tar.gz' do
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
tar_contents, exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{@backup_tar}}
+ %W{tar -tvf #{backup_tar}}
)
+
expect(exit_status).to eq(0)
expect(tar_contents).not_to match('registry.tar.gz')
end
@@ -232,37 +203,33 @@ describe 'gitlab:app namespace rake task' do
}
end
- let(:project_a) { create(:project, :repository, repository_storage: 'default') }
- let(:project_b) { create(:project, :repository, repository_storage: 'test_second_storage') }
-
before do
- FileUtils.mkdir('tmp/tests/default_storage')
- FileUtils.mkdir('tmp/tests/custom_storage')
+ # We only need a backup of the repositories for this test
+ stub_env('SKIP', 'db,uploads,builds,artifacts,lfs,registry')
+ FileUtils.mkdir(Settings.absolute('tmp/tests/default_storage'))
+ FileUtils.mkdir(Settings.absolute('tmp/tests/custom_storage'))
allow(Gitlab.config.repositories).to receive(:storages).and_return(storages)
- # Create the projects now, after mocking the settings but before doing the backup
- project_a
- project_b
-
# Avoid asking gitaly about the root ref (which will fail because of the
# mocked storages)
allow_any_instance_of(Repository).to receive(:empty_repo?).and_return(false)
-
- # We only need a backup of the repositories for this test
- ENV["SKIP"] = "db,uploads,builds,artifacts,lfs,registry"
- create_backup
end
after do
- FileUtils.rm_rf('tmp/tests/default_storage')
- FileUtils.rm_rf('tmp/tests/custom_storage')
- FileUtils.rm(@backup_tar) if @backup_tar
+ FileUtils.rm_rf(Settings.absolute('tmp/tests/default_storage'))
+ FileUtils.rm_rf(Settings.absolute('tmp/tests/custom_storage'))
end
it 'includes repositories in all repository storages' do
+ project_a = create(:project, :repository, repository_storage: 'default')
+ project_b = create(:project, :repository, repository_storage: 'test_second_storage')
+
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
tar_contents, exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{@backup_tar} repositories}
+ %W{tar -tvf #{backup_tar} repositories}
)
+
expect(exit_status).to eq(0)
expect(tar_contents).to match("repositories/#{project_a.disk_path}.bundle")
expect(tar_contents).to match("repositories/#{project_b.disk_path}.bundle")
@@ -271,35 +238,15 @@ describe 'gitlab:app namespace rake task' do
end # backup_create task
describe "Skipping items" do
- def tars_glob
- Dir.glob(File.join(Gitlab.config.backup.path, '*_gitlab_backup.tar'))
- end
-
- before :all do
- @origin_cd = Dir.pwd
-
- reenable_backup_sub_tasks
-
- FileUtils.rm tars_glob
-
- # Redirect STDOUT and run the rake task
- orig_stdout = $stdout
- $stdout = StringIO.new
- ENV["SKIP"] = "repositories,uploads"
- run_rake_task('gitlab:backup:create')
- $stdout = orig_stdout
-
- @backup_tar = tars_glob.first
- end
-
- after :all do
- FileUtils.rm(@backup_tar)
- Dir.chdir @origin_cd
+ before do
+ stub_env('SKIP', 'repositories,uploads')
end
it "does not contain skipped item" do
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
tar_contents, _exit_status = Gitlab::Popen.popen(
- %W{tar -tvf #{@backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz registry.tar.gz}
+ %W{tar -tvf #{backup_tar} db uploads.tar.gz repositories builds.tar.gz artifacts.tar.gz pages.tar.gz lfs.tar.gz registry.tar.gz}
)
expect(tar_contents).to match('db/')
@@ -313,9 +260,10 @@ describe 'gitlab:app namespace rake task' do
end
it 'does not invoke repositories restore' do
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
allow(Rake::Task['gitlab:shell:setup'])
.to receive(:invoke).and_return(true)
- allow($stdout).to receive :write
expect(Rake::Task['gitlab:db:drop_tables']).to receive :invoke
expect(Rake::Task['gitlab:backup:db:restore']).to receive :invoke
@@ -327,38 +275,15 @@ describe 'gitlab:app namespace rake task' do
expect(Rake::Task['gitlab:backup:lfs:restore']).to receive :invoke
expect(Rake::Task['gitlab:backup:registry:restore']).to receive :invoke
expect(Rake::Task['gitlab:shell:setup']).to receive :invoke
- expect { run_rake_task('gitlab:backup:restore') }.not_to raise_error
+ expect { run_rake_task('gitlab:backup:restore') }.to output.to_stdout
end
end
describe "Human Readable Backup Name" do
- def tars_glob
- Dir.glob(File.join(Gitlab.config.backup.path, '*_gitlab_backup.tar'))
- end
-
- before :all do
- @origin_cd = Dir.pwd
-
- reenable_backup_sub_tasks
-
- FileUtils.rm tars_glob
-
- # Redirect STDOUT and run the rake task
- orig_stdout = $stdout
- $stdout = StringIO.new
- run_rake_task('gitlab:backup:create')
- $stdout = orig_stdout
-
- @backup_tar = tars_glob.first
- end
-
- after :all do
- FileUtils.rm(@backup_tar)
- Dir.chdir @origin_cd
- end
-
it 'names the file with a human-readable timestamp' do
- expect(@backup_tar).to match(/\d+_\d{4}_\d{2}_\d{2}_\d+\.\d+\.\d+.*_gitlab_backup.tar$/)
+ expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
+
+ expect(backup_tar).to match(/\d+_\d{4}_\d{2}_\d{2}_\d+\.\d+\.\d+.*_gitlab_backup.tar$/)
end
end
end # gitlab:app namespace
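The recurring pattern in the reworked backup specs, shown in isolation: stub_env comes from the spec support helpers and run_rake_task is defined in the spec above; the SKIP values are illustrative:

    describe 'gitlab:backup:create' do
      before do
        # stub_env scopes the variable to the example instead of mutating ENV
        stub_env('SKIP', 'repositories,uploads')
        stub_env('force', 'yes')
      end

      it 'creates a backup without leaking task output into spec progress' do
        # output.to_stdout captures the rake task's logging, replacing the
        # old manual $stdout redirection
        expect { run_rake_task('gitlab:backup:create') }.to output.to_stdout
      end
    end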
diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb
index 1e9b20435ec..5dd8fe8eaa5 100644
--- a/spec/tasks/gitlab/gitaly_rake_spec.rb
+++ b/spec/tasks/gitlab/gitaly_rake_spec.rb
@@ -43,15 +43,8 @@ describe 'gitlab:gitaly namespace rake task' do
describe 'gmake/make' do
let(:command_preamble) { %w[/usr/bin/env -u RUBYOPT -u BUNDLE_GEMFILE] }
- before(:all) do
- @old_env_ci = ENV.delete('CI')
- end
-
- after(:all) do
- ENV['CI'] = @old_env_ci if @old_env_ci
- end
-
before do
+ stub_env('CI', false)
FileUtils.mkdir_p(clone_path)
expect(Dir).to receive(:chdir).with(clone_path).and_call_original
allow(Bundler).to receive(:bundle_path).and_return('/fake/bundle_path')
diff --git a/spec/tasks/gitlab/ldap_rake_spec.rb b/spec/tasks/gitlab/ldap_rake_spec.rb
index 12d442b9820..279234f2887 100644
--- a/spec/tasks/gitlab/ldap_rake_spec.rb
+++ b/spec/tasks/gitlab/ldap_rake_spec.rb
@@ -4,7 +4,7 @@ describe 'gitlab:ldap:rename_provider rake task' do
it 'completes without error' do
Rake.application.rake_require 'tasks/gitlab/ldap'
stub_warn_user_is_not_gitlab
- ENV['force'] = 'yes'
+ stub_env('force', 'yes')
create(:identity) # Necessary to prevent `exit 1` from the task.