gitlab.com/gitlab-org/gitlab-foss.git
path: root/spec

Diffstat (limited to 'spec')
 spec/controllers/projects/branches_controller_spec.rb | 14
 spec/controllers/projects/services_controller_spec.rb | 36
 spec/factories/keys.rb | 63
 spec/factories/services.rb | 1
 spec/features/issues/filtered_search/dropdown_hint_spec.rb | 5
 spec/features/password_reset_spec.rb | 19
 spec/features/profiles/keys_spec.rb | 1
 spec/features/profiles/oauth_applications_spec.rb | 8
 spec/features/projects/clusters/interchangeability_spec.rb | 2
 spec/features/runners_spec.rb | 20
 spec/features/u2f_spec.rb | 2
 spec/fixtures/api/schemas/entities/merge_request_metrics.json | 21
 spec/fixtures/api/schemas/entities/merge_request_widget.json | 8
 spec/fixtures/api/schemas/entities/user.json | 17
 spec/javascripts/blob/notebook/index_spec.js | 48
 spec/javascripts/boards/board_blank_state_spec.js | 23
 spec/javascripts/boards/board_card_spec.js | 13
 spec/javascripts/boards/board_list_spec.js | 14
 spec/javascripts/boards/board_new_issue_spec.js | 23
 spec/javascripts/boards/boards_store_spec.js | 15
 spec/javascripts/boards/components/board_spec.js | 3
 spec/javascripts/boards/issue_card_spec.js | 3
 spec/javascripts/boards/issue_spec.js | 3
 spec/javascripts/boards/list_spec.js | 21
 spec/javascripts/boards/mock_data.js | 35
 spec/javascripts/filtered_search/filtered_search_manager_spec.js | 14
 spec/javascripts/issue_show/components/app_spec.js | 123
 spec/javascripts/issue_show/mock_data.js | 10
 spec/javascripts/job_spec.js | 8
 spec/javascripts/repo/components/repo_file_spec.js | 15
 spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js | 6
 spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js | 4
 spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js | 13
 spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js | 10
 spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js | 17
 spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js | 13
 spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js | 4
 spec/javascripts/vue_mr_widget/mock_data.js | 9
 spec/javascripts/vue_mr_widget/mr_widget_options_spec.js | 5
 spec/javascripts/vue_shared/components/file_icon_spec.js | 83
 spec/javascripts/vue_shared/components/panel_resizer_spec.js | 59
 spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb | 124
 spec/lib/gitlab/bare_repository_import/importer_spec.rb | 23
 spec/lib/gitlab/bare_repository_import/repository_spec.rb | 130
 spec/lib/gitlab/ci/ansi2html_spec.rb | 4
 spec/lib/gitlab/database/migration_helpers_spec.rb | 91
 spec/lib/gitlab/encoding_helper_spec.rb | 14
 spec/lib/gitlab/git/commit_spec.rb | 5
 spec/lib/gitlab/git/gitlab_projects_spec.rb | 72
 spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb | 90
 spec/lib/gitlab/gitaly_client_spec.rb | 14
 spec/lib/gitlab/search_results_spec.rb | 35
 spec/lib/gitlab/visibility_level_spec.rb | 27
 spec/migrations/clean_up_for_members_spec.rb | 78
 spec/migrations/issues_moved_to_id_foreign_key_spec.rb | 25
 spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb | 24
 spec/models/diff_discussion_spec.rb | 48
 spec/models/issue_spec.rb | 26
 spec/models/merge_request/metrics_spec.rb | 15
 spec/models/merge_request_spec.rb | 19
 spec/models/namespace_spec.rb | 16
 spec/models/project_services/kubernetes_service_spec.rb | 101
 spec/models/project_spec.rb | 52
 spec/models/service_spec.rb | 18
 spec/models/user_spec.rb | 4
 spec/requests/api/notes_spec.rb | 2
 spec/requests/api/project_milestones_spec.rb | 2
 spec/requests/api/services_spec.rb | 8
 spec/requests/api/v3/milestones_spec.rb | 2
 spec/requests/api/v3/notes_spec.rb | 2
 spec/requests/api/v3/services_spec.rb | 4
 spec/requests/api/wikis_spec.rb | 30
 spec/serializers/event_entity_spec.rb | 13
 spec/serializers/merge_request_widget_entity_spec.rb | 75
 spec/services/create_deployment_service_spec.rb | 2
 spec/services/merge_requests/close_service_spec.rb | 13
 spec/services/merge_requests/conflicts/list_service_spec.rb | 26
 spec/services/merge_requests/conflicts/resolve_service_spec.rb | 20
 spec/services/merge_requests/post_merge_service_spec.rb | 13
 spec/services/merge_requests/reopen_service_spec.rb | 13
 spec/services/projects/create_service_spec.rb | 6
 spec/services/projects/fork_service_spec.rb | 17
 spec/services/projects/hashed_storage/migrate_repository_service_spec.rb | 8
 spec/services/projects/transfer_service_spec.rb | 12
 spec/services/projects/update_service_spec.rb | 2
 spec/services/system_note_service_spec.rb | 32
 spec/services/update_merge_request_metrics_service_spec.rb | 42
 spec/services/users/destroy_service_spec.rb | 17
 spec/support/services_shared_context.rb | 8
 spec/support/test_env.rb | 2
 90 files changed, 1788 insertions(+), 449 deletions(-)
diff --git a/spec/controllers/projects/branches_controller_spec.rb b/spec/controllers/projects/branches_controller_spec.rb
index 91894661ccb..734396ddf7b 100644
--- a/spec/controllers/projects/branches_controller_spec.rb
+++ b/spec/controllers/projects/branches_controller_spec.rb
@@ -148,6 +148,20 @@ describe Projects::BranchesController do
end
end
+ context 'when create branch service fails' do
+ let(:branch) { "./invalid-branch-name" }
+
+ it "doesn't post a system note" do
+ expect(SystemNoteService).not_to receive(:new_issue_branch)
+
+ post :create,
+ namespace_id: project.namespace,
+ project_id: project,
+ branch_name: branch,
+ issue_iid: issue.iid
+ end
+ end
+
context 'without issue feature access' do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index 2c6ad00515e..847ac6f2be0 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -114,5 +114,41 @@ describe Projects::ServicesController do
expect(flash[:notice]).to eq 'HipChat settings saved, but not activated.'
end
end
+
+ context 'with a deprecated service' do
+ let(:service) { create(:kubernetes_service, project: project) }
+
+ before do
+ put :update,
+ namespace_id: project.namespace, project_id: project, id: service.to_param, service: { namespace: 'updated_namespace' }
+ end
+
+ it 'should not update the service' do
+ service.reload
+ expect(service.namespace).not_to eq('updated_namespace')
+ end
+ end
+ end
+
+ describe "GET #edit" do
+ before do
+ get :edit, namespace_id: project.namespace, project_id: project, id: service_id
+ end
+
+ context 'with approved services' do
+ let(:service_id) { 'jira' }
+
+ it 'should render edit page' do
+ expect(response).to be_success
+ end
+ end
+
+ context 'with a deprecated service' do
+ let(:service_id) { 'kubernetes' }
+
+ it 'should render edit page' do
+ expect(response).to be_success
+ end
+ end
end
end
diff --git a/spec/factories/keys.rb b/spec/factories/keys.rb
index e6eb76f71d3..552b4b7e06e 100644
--- a/spec/factories/keys.rb
+++ b/spec/factories/keys.rb
@@ -21,12 +21,14 @@ FactoryBot.define do
factory :rsa_key_2048 do
key do
- 'ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFf6RYK3qu/RKF/3ndJmL5xgMLp3O9' \
- '6x8lTay+QGZ0+9FnnAXMdUqBq/ZU6d/gyMB4IaW3nHzM1w049++yAB6UPCzMB8Uo27K5' \
- '/jyZCtj7Vm9PFNjF/8am1kp46c/SeYicQgQaSBdzIW3UDEa1Ef68qroOlvpi9PYZ/tA7' \
- 'M0YP0K5PXX+E36zaIRnJVMPT3f2k+GnrxtjafZrwFdpOP/Fol5BQLBgcsyiU+LM1SuaC' \
- 'rzd8c9vyaTA1CxrkxaZh+buAi0PmdDtaDrHd42gqZkXCKavyvgM5o2CkQ5LJHCgzpXy0' \
- '5qNFzmThBSkb+XtoxbyagBiGbVZtSVow6Xa7qewz= dummy@gitlab.com'
+ <<~KEY.delete("\n")
+ ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDFf6RYK3qu/RKF/3ndJmL5xgMLp3O9
+ 6x8lTay+QGZ0+9FnnAXMdUqBq/ZU6d/gyMB4IaW3nHzM1w049++yAB6UPCzMB8Uo27K5
+ /jyZCtj7Vm9PFNjF/8am1kp46c/SeYicQgQaSBdzIW3UDEa1Ef68qroOlvpi9PYZ/tA7
+ M0YP0K5PXX+E36zaIRnJVMPT3f2k+GnrxtjafZrwFdpOP/Fol5BQLBgcsyiU+LM1SuaC
+ rzd8c9vyaTA1CxrkxaZh+buAi0PmdDtaDrHd42gqZkXCKavyvgM5o2CkQ5LJHCgzpXy0
+ 5qNFzmThBSkb+XtoxbyagBiGbVZtSVow6Xa7qewz= dummy@gitlab.com
+ KEY
end
factory :rsa_deploy_key_2048, class: 'DeployKey'
@@ -34,37 +36,44 @@ FactoryBot.define do
factory :dsa_key_2048 do
key do
- 'ssh-dss AAAAB3NzaC1kc3MAAAEBAO/3/NPLA/zSFkMOCaTtGo+uos1flfQ5f038Uk+G' \
- 'Y9AeLGzX+Srhw59GdVXmOQLYBrOt5HdGwqYcmLnE2VurUGmhtfeO5H+3p5pGJbkS0Gxp' \
- 'YH1HRO9lWsncF3Hh1w4lYsDjkclDiSTdfTuN8F4Kb3DXNnVSCieeonp+B25F/CXagyTQ' \
- '/pvNmHFeYgGCVdnBtFdi+xfxaZ8NKdPrGggzokbKHElDZQ4Xo5EpdcyLajgM7nB2r2Rz' \
- 'OrmeaevKi5lV68ehRa9Yyrb7vxvwiwBwOgqR/mnN7Gnaq1jUdmJY+ct04Qwx37f5jvhv' \
- '5gA4U40SGMoiHM8RFIN7Ksz0jsyX73MAAAAVALRWOfjfzHpK7KLz4iqDvvTUAevJAAAB' \
- 'AEa9NZ+6y9iQ5erGsdfLTXFrhSefTG0NhghoO/5IFkSGfd8V7kzTvCHaFrcfpEA5kP8t' \
- 'poeOG0TASB6tgGOxm1Bq4Wncry5RORBPJlAVpDGRcvZ931ddH7IgltEInS6za2uH6F/1' \
- 'M1QfKePSLr6xJ1ZLYfP0Og5KTp1x6yMQvfwV0a+XdA+EPgaJWLWp/pWwKWa0oLUgjsIH' \
- 'MYzuOGh5c708uZrmkzqvgtW2NgXhcIroRgynT3IfI2lP2rqqb3uuuE/qH5UCUFO+Dc3H' \
- 'nAFNeQDT/M25AERdPYBAY5a+iPjIgO+jT7BfmfByT+AZTqZySrCyc7nNZL3YgGLK0l6A' \
- '1GgAAAEBAN9FpFOdIXE+YEZhKl1vPmbcn+b1y5zOl6N4x1B7Q8pD/pLMziWROIS8uLzb' \
- 'aZ0sMIWezHIkxuo1iROMeT+jtCubn7ragaN6AX7nMpxYUH9+mYZZs/fyElt6wCviVhTI' \
- 'zM+u7VdQsnZttOOlQfogHdL+SpeAft0DsfJjlcgQnsLlHQKv6aPqCPYUST2nE7RyW/Ex' \
- 'PrMxLtOWt0/j8RYHbwwqvyeZqBz3ESBgrS9c5tBdBfauwYUV/E7gPLOU3OZFw9ue7o+z' \
- 'wzoTZqW6Xouy5wtWvSLQSLT5XwOslmQz8QMBxD0AQyDfEFGsBCWzmbTgKv9uqrBjubsS' \
- 'Taja+Cf9kMo== dummy@gitlab.com'
+ <<~KEY.delete("\n")
+ ssh-dss AAAAB3NzaC1kc3MAAAEBAO/3/NPLA/zSFkMOCaTtGo+uos1flfQ5f038Uk+G
+ Y9AeLGzX+Srhw59GdVXmOQLYBrOt5HdGwqYcmLnE2VurUGmhtfeO5H+3p5pGJbkS0Gxp
+ YH1HRO9lWsncF3Hh1w4lYsDjkclDiSTdfTuN8F4Kb3DXNnVSCieeonp+B25F/CXagyTQ
+ /pvNmHFeYgGCVdnBtFdi+xfxaZ8NKdPrGggzokbKHElDZQ4Xo5EpdcyLajgM7nB2r2Rz
+ OrmeaevKi5lV68ehRa9Yyrb7vxvwiwBwOgqR/mnN7Gnaq1jUdmJY+ct04Qwx37f5jvhv
+ 5gA4U40SGMoiHM8RFIN7Ksz0jsyX73MAAAAVALRWOfjfzHpK7KLz4iqDvvTUAevJAAAB
+ AEa9NZ+6y9iQ5erGsdfLTXFrhSefTG0NhghoO/5IFkSGfd8V7kzTvCHaFrcfpEA5kP8t
+ poeOG0TASB6tgGOxm1Bq4Wncry5RORBPJlAVpDGRcvZ931ddH7IgltEInS6za2uH6F/1
+ M1QfKePSLr6xJ1ZLYfP0Og5KTp1x6yMQvfwV0a+XdA+EPgaJWLWp/pWwKWa0oLUgjsIH
+ MYzuOGh5c708uZrmkzqvgtW2NgXhcIroRgynT3IfI2lP2rqqb3uuuE/qH5UCUFO+Dc3H
+ nAFNeQDT/M25AERdPYBAY5a+iPjIgO+jT7BfmfByT+AZTqZySrCyc7nNZL3YgGLK0l6A
+ 1GgAAAEBAN9FpFOdIXE+YEZhKl1vPmbcn+b1y5zOl6N4x1B7Q8pD/pLMziWROIS8uLzb
+ aZ0sMIWezHIkxuo1iROMeT+jtCubn7ragaN6AX7nMpxYUH9+mYZZs/fyElt6wCviVhTI
+ zM+u7VdQsnZttOOlQfogHdL+SpeAft0DsfJjlcgQnsLlHQKv6aPqCPYUST2nE7RyW/Ex
+ PrMxLtOWt0/j8RYHbwwqvyeZqBz3ESBgrS9c5tBdBfauwYUV/E7gPLOU3OZFw9ue7o+z
+ wzoTZqW6Xouy5wtWvSLQSLT5XwOslmQz8QMBxD0AQyDfEFGsBCWzmbTgKv9uqrBjubsS
+ Taja+Cf9kMo== dummy@gitlab.com
+ KEY
end
end
factory :ecdsa_key_256 do
key do
- 'ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYA' \
- 'AABBBJZmkzTgY0fiCQ+DVReyH/fFwTFz0XoR3RUO0u+199H19KFw7mNPxRSMOVS7tEtO' \
- 'Nj3Q7FcZXfqthHvgAzDiHsc= dummy@gitlab.com'
+ <<~KEY.delete("\n")
+ ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYA
+ AABBBJZmkzTgY0fiCQ+DVReyH/fFwTFz0XoR3RUO0u+199H19KFw7mNPxRSMOVS7tEtO
+ Nj3Q7FcZXfqthHvgAzDiHsc= dummy@gitlab.com
+ KEY
end
end
factory :ed25519_key_256 do
key do
- 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIETnVTgzqC1gatgSlC4zH6aYt2CAQzgJOhDRvf59ohL6 dummy@gitlab.com'
+ <<~KEY.delete("\n")
+ ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIETnVTgzqC1gatgSlC4zH6aYt2CAQzgJ
+ OhDRvf59ohL6 dummy@gitlab.com
+ KEY
end
end
end
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index 4b0377967c7..110ef33c6f7 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -18,6 +18,7 @@ FactoryBot.define do
factory :kubernetes_service do
project
+ type 'KubernetesService'
active true
properties({
api_url: 'https://kubernetes.example.com',
diff --git a/spec/features/issues/filtered_search/dropdown_hint_spec.rb b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
index 18ae45aa340..ef40dddfd3a 100644
--- a/spec/features/issues/filtered_search/dropdown_hint_spec.rb
+++ b/spec/features/issues/filtered_search/dropdown_hint_spec.rb
@@ -176,6 +176,7 @@ describe 'Dropdown hint', :js do
it 'reuses existing author text' do
filtered_search.send_keys('author:')
filtered_search.send_keys(:backspace)
+ filtered_search.send_keys(:backspace)
click_hint('author')
expect_tokens([{ name: 'author' }])
@@ -185,6 +186,7 @@ describe 'Dropdown hint', :js do
it 'reuses existing assignee text' do
filtered_search.send_keys('assignee:')
filtered_search.send_keys(:backspace)
+ filtered_search.send_keys(:backspace)
click_hint('assignee')
expect_tokens([{ name: 'assignee' }])
@@ -194,6 +196,7 @@ describe 'Dropdown hint', :js do
it 'reuses existing milestone text' do
filtered_search.send_keys('milestone:')
filtered_search.send_keys(:backspace)
+ filtered_search.send_keys(:backspace)
click_hint('milestone')
expect_tokens([{ name: 'milestone' }])
@@ -203,6 +206,7 @@ describe 'Dropdown hint', :js do
it 'reuses existing label text' do
filtered_search.send_keys('label:')
filtered_search.send_keys(:backspace)
+ filtered_search.send_keys(:backspace)
click_hint('label')
expect_tokens([{ name: 'label' }])
@@ -212,6 +216,7 @@ describe 'Dropdown hint', :js do
it 'reuses existing emoji text' do
filtered_search.send_keys('my-reaction:')
filtered_search.send_keys(:backspace)
+ filtered_search.send_keys(:backspace)
click_hint('my-reaction')
expect_tokens([{ name: 'my-reaction' }])
diff --git a/spec/features/password_reset_spec.rb b/spec/features/password_reset_spec.rb
index b45972b7f6b..73a526c3d8a 100644
--- a/spec/features/password_reset_spec.rb
+++ b/spec/features/password_reset_spec.rb
@@ -33,6 +33,25 @@ feature 'Password reset' do
end
end
+ describe 'Changing password while logged in' do
+ it 'updates the password' do
+ user = create(:user)
+ token = user.send_reset_password_instructions
+
+ sign_in(user)
+
+ visit(edit_user_password_path(reset_password_token: token))
+
+ fill_in 'New password', with: 'hello1234'
+ fill_in 'Confirm new password', with: 'hello1234'
+
+ click_button 'Change your password'
+
+ expect(page).to have_content(I18n.t('devise.passwords.updated_not_active'))
+ expect(current_path).to eq new_user_session_path
+ end
+ end
+
def forgot_password(user)
visit root_path
click_on 'Forgot your password?'
diff --git a/spec/features/profiles/keys_spec.rb b/spec/features/profiles/keys_spec.rb
index 7d5ba3a7328..b04a5422fed 100644
--- a/spec/features/profiles/keys_spec.rb
+++ b/spec/features/profiles/keys_spec.rb
@@ -27,6 +27,7 @@ feature 'Profile > SSH Keys' do
expect(page).to have_content("Title: #{attrs[:title]}")
expect(page).to have_content(attrs[:key])
+ expect(find('.breadcrumbs-sub-title')).to have_link(attrs[:title])
end
context 'when only DSA and ECDSA keys are allowed' do
diff --git a/spec/features/profiles/oauth_applications_spec.rb b/spec/features/profiles/oauth_applications_spec.rb
index d1edeef8da4..7d204f89fba 100644
--- a/spec/features/profiles/oauth_applications_spec.rb
+++ b/spec/features/profiles/oauth_applications_spec.rb
@@ -2,12 +2,20 @@ require 'spec_helper'
describe 'Profile > Applications' do
let(:user) { create(:user) }
+ let(:application) { create(:oauth_application, owner: user) }
before do
sign_in(user)
end
describe 'User manages applications', :js do
+ it 'views an application' do
+ visit oauth_application_path(application)
+
+ expect(page).to have_content("Application: #{application.name}")
+ expect(find('.breadcrumbs-sub-title')).to have_link(application.name)
+ end
+
it 'deletes an application' do
create(:oauth_application, owner: user)
visit oauth_applications_path
diff --git a/spec/features/projects/clusters/interchangeability_spec.rb b/spec/features/projects/clusters/interchangeability_spec.rb
index 01f9526608f..3ddb35c755c 100644
--- a/spec/features/projects/clusters/interchangeability_spec.rb
+++ b/spec/features/projects/clusters/interchangeability_spec.rb
@@ -1,7 +1,7 @@
require 'spec_helper'
feature 'Interchangeability between KubernetesService and Platform::Kubernetes' do
- EXCEPT_METHODS = %i[test title description help fields initialize_properties namespace namespace= api_url api_url=].freeze
+ EXCEPT_METHODS = %i[test title description help fields initialize_properties namespace namespace= api_url api_url= deprecated? deprecation_message].freeze
EXCEPT_METHODS_GREP_V = %w[_touched? _changed? _was].freeze
it 'Clusters::Platform::Kubernetes covers core interfaces in KubernetesService' do
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index c7f0e342809..aec9de6c7ca 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -33,6 +33,26 @@ feature 'Runners' do
expect(page).to have_content(specific_runner.platform)
end
+ scenario 'user can pause and resume the specific runner' do
+ visit runners_path(project)
+
+ within '.activated-specific-runners' do
+ expect(page).to have_content('Pause')
+ end
+
+ click_on 'Pause'
+
+ within '.activated-specific-runners' do
+ expect(page).to have_content('Resume')
+ end
+
+ click_on 'Resume'
+
+ within '.activated-specific-runners' do
+ expect(page).to have_content('Pause')
+ end
+ end
+
scenario 'user removes an activated specific runner if this is last project for that runners' do
visit runners_path(project)
diff --git a/spec/features/u2f_spec.rb b/spec/features/u2f_spec.rb
index c9afef2a8de..50ee1656e10 100644
--- a/spec/features/u2f_spec.rb
+++ b/spec/features/u2f_spec.rb
@@ -264,7 +264,7 @@ feature 'Using U2F (Universal 2nd Factor) Devices for Authentication', :js do
end
it "deletes u2f registrations" do
- visit profile_account_path
+ visit profile_two_factor_auth_path
expect do
accept_confirm { click_on "Disable" }
end.to change { U2fRegistration.count }.by(-1)
diff --git a/spec/fixtures/api/schemas/entities/merge_request_metrics.json b/spec/fixtures/api/schemas/entities/merge_request_metrics.json
new file mode 100644
index 00000000000..3fa767f85df
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/merge_request_metrics.json
@@ -0,0 +1,21 @@
+{
+ "type": "object",
+ "required": ["closed_at", "merged_at", "closed_by", "merged_by"],
+ "properties" : {
+ "closed_at": { "type": ["datetime", "null"] },
+ "merged_at": { "type": ["datetime", "null"] },
+ "closed_by": {
+ "oneOf": [
+ { "type": "null" },
+ { "$ref": "user.json" }
+ ]
+ },
+ "merged_by": {
+ "oneOf": [
+ { "type": "null" },
+ { "$ref": "user.json" }
+ ]
+ }
+ },
+ "additionalProperties": false
+}
diff --git a/spec/fixtures/api/schemas/entities/merge_request_widget.json b/spec/fixtures/api/schemas/entities/merge_request_widget.json
index 342890c3dee..9de27bee751 100644
--- a/spec/fixtures/api/schemas/entities/merge_request_widget.json
+++ b/spec/fixtures/api/schemas/entities/merge_request_widget.json
@@ -31,8 +31,12 @@
"source_project_id": { "type": "integer" },
"target_branch": { "type": "string" },
"target_project_id": { "type": "integer" },
- "merge_event": { "type": ["object", "null"] },
- "closed_event": { "type": ["object", "null"] },
+ "metrics": {
+ "oneOf": [
+ { "type": "null" },
+ { "$ref": "merge_request_metrics.json" }
+ ]
+ },
"author": { "type": ["object", "null"] },
"merge_user": { "type": ["object", "null"] },
"diff_head_sha": { "type": ["string", "null"] },
diff --git a/spec/fixtures/api/schemas/entities/user.json b/spec/fixtures/api/schemas/entities/user.json
new file mode 100644
index 00000000000..6482e0eedd2
--- /dev/null
+++ b/spec/fixtures/api/schemas/entities/user.json
@@ -0,0 +1,17 @@
+{
+ "type": "object",
+ "required": [
+ "id",
+ "state",
+ "avatar_url",
+ "web_url",
+ "path"
+ ],
+ "properties": {
+ "id": { "type": "integer" },
+ "state": { "type": "string" },
+ "avatar_url": { "type": "string" },
+ "web_url": { "type": "string" },
+ "path": { "type": "string" }
+ }
+}
diff --git a/spec/javascripts/blob/notebook/index_spec.js b/spec/javascripts/blob/notebook/index_spec.js
index c3e67550f05..df1b2c9960b 100644
--- a/spec/javascripts/blob/notebook/index_spec.js
+++ b/spec/javascripts/blob/notebook/index_spec.js
@@ -1,4 +1,5 @@
-import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import renderNotebook from '~/blob/notebook';
describe('iPython notebook renderer', () => {
@@ -17,8 +18,11 @@ describe('iPython notebook renderer', () => {
});
describe('successful response', () => {
- const response = (request, next) => {
- next(request.respondWith(JSON.stringify({
+ let mock;
+
+ beforeEach((done) => {
+ mock = new MockAdapter(axios);
+ mock.onGet('/test').reply(200, {
cells: [{
cell_type: 'markdown',
source: ['# test'],
@@ -31,13 +35,7 @@ describe('iPython notebook renderer', () => {
],
outputs: [],
}],
- }), {
- status: 200,
- }));
- };
-
- beforeEach((done) => {
- Vue.http.interceptors.push(response);
+ });
renderNotebook();
@@ -47,9 +45,7 @@ describe('iPython notebook renderer', () => {
});
afterEach(() => {
- Vue.http.interceptors = _.without(
- Vue.http.interceptors, response,
- );
+ mock.reset();
});
it('does not show loading icon', () => {
@@ -86,14 +82,11 @@ describe('iPython notebook renderer', () => {
});
describe('error in JSON response', () => {
- const response = (request, next) => {
- next(request.respondWith('{ "cells": [{"cell_type": "markdown"} }', {
- status: 200,
- }));
- };
+ let mock;
beforeEach((done) => {
- Vue.http.interceptors.push(response);
+ mock = new MockAdapter(axios);
+ mock.onGet('/test').reply(() => Promise.reject({ status: 200, data: '{ "cells": [{"cell_type": "markdown"} }' }));
renderNotebook();
@@ -103,9 +96,7 @@ describe('iPython notebook renderer', () => {
});
afterEach(() => {
- Vue.http.interceptors = _.without(
- Vue.http.interceptors, response,
- );
+ mock.reset();
});
it('does not show loading icon', () => {
@@ -122,14 +113,11 @@ describe('iPython notebook renderer', () => {
});
describe('error getting file', () => {
- const response = (request, next) => {
- next(request.respondWith('', {
- status: 500,
- }));
- };
+ let mock;
beforeEach((done) => {
- Vue.http.interceptors.push(response);
+ mock = new MockAdapter(axios);
+ mock.onGet('/test').reply(500, '');
renderNotebook();
@@ -139,9 +127,7 @@ describe('iPython notebook renderer', () => {
});
afterEach(() => {
- Vue.http.interceptors = _.without(
- Vue.http.interceptors, response,
- );
+ mock.reset();
});
it('does not show loading icon', () => {
diff --git a/spec/javascripts/boards/board_blank_state_spec.js b/spec/javascripts/boards/board_blank_state_spec.js
index 2ee3792dd65..f757dadfada 100644
--- a/spec/javascripts/boards/board_blank_state_spec.js
+++ b/spec/javascripts/boards/board_blank_state_spec.js
@@ -1,9 +1,8 @@
/* global BoardService */
-/* global mockBoardService */
import Vue from 'vue';
import '~/boards/stores/boards_store';
import boardBlankState from '~/boards/components/board_blank_state';
-import './mock_data';
+import { mockBoardService } from './mock_data';
describe('Boards blank state', () => {
let vm;
@@ -20,17 +19,15 @@ describe('Boards blank state', () => {
reject();
} else {
resolve({
- json() {
- return [{
- id: 1,
- title: 'To Do',
- label: { id: 1 },
- }, {
- id: 2,
- title: 'Doing',
- label: { id: 2 },
- }];
- },
+ data: [{
+ id: 1,
+ title: 'To Do',
+ label: { id: 1 },
+ }, {
+ id: 2,
+ title: 'Doing',
+ label: { id: 2 },
+ }],
});
}
}));
diff --git a/spec/javascripts/boards/board_card_spec.js b/spec/javascripts/boards/board_card_spec.js
index 8f607899b20..4e73fa1fe87 100644
--- a/spec/javascripts/boards/board_card_spec.js
+++ b/spec/javascripts/boards/board_card_spec.js
@@ -1,12 +1,11 @@
/* global List */
/* global ListAssignee */
/* global ListLabel */
-/* global listObj */
-/* global boardsMockInterceptor */
/* global BoardService */
-/* global mockBoardService */
import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import '~/boards/models/assignee';
import eventHub from '~/boards/eventhub';
@@ -14,13 +13,15 @@ import '~/boards/models/list';
import '~/boards/models/label';
import '~/boards/stores/boards_store';
import boardCard from '~/boards/components/board_card.vue';
-import './mock_data';
+import { listObj, boardsMockInterceptor, mockBoardService } from './mock_data';
describe('Board card', () => {
let vm;
+ let mock;
beforeEach((done) => {
- Vue.http.interceptors.push(boardsMockInterceptor);
+ mock = new MockAdapter(axios);
+ mock.onAny().reply(boardsMockInterceptor);
gl.boardService = mockBoardService();
gl.issueBoards.BoardsStore.create();
@@ -54,7 +55,7 @@ describe('Board card', () => {
});
afterEach(() => {
- Vue.http.interceptors = _.without(Vue.http.interceptors, boardsMockInterceptor);
+ mock.reset();
});
it('returns false when detailIssue is empty', () => {
diff --git a/spec/javascripts/boards/board_list_spec.js b/spec/javascripts/boards/board_list_spec.js
index 6bd00943a8f..7c5888b6d82 100644
--- a/spec/javascripts/boards/board_list_spec.js
+++ b/spec/javascripts/boards/board_list_spec.js
@@ -1,11 +1,9 @@
/* global BoardService */
-/* global boardsMockInterceptor */
/* global List */
-/* global listObj */
/* global ListIssue */
-/* global mockBoardService */
import Vue from 'vue';
-import _ from 'underscore';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import Sortable from 'vendor/Sortable';
import BoardList from '~/boards/components/board_list';
import eventHub from '~/boards/eventhub';
@@ -13,18 +11,20 @@ import '~/boards/mixins/sortable_default_options';
import '~/boards/models/issue';
import '~/boards/models/list';
import '~/boards/stores/boards_store';
-import './mock_data';
+import { listObj, boardsMockInterceptor, mockBoardService } from './mock_data';
window.Sortable = Sortable;
describe('Board list component', () => {
+ let mock;
let component;
beforeEach((done) => {
const el = document.createElement('div');
document.body.appendChild(el);
- Vue.http.interceptors.push(boardsMockInterceptor);
+ mock = new MockAdapter(axios);
+ mock.onAny().reply(boardsMockInterceptor);
gl.boardService = mockBoardService();
gl.issueBoards.BoardsStore.create();
gl.IssueBoardsApp = new Vue();
@@ -60,7 +60,7 @@ describe('Board list component', () => {
});
afterEach(() => {
- Vue.http.interceptors = _.without(Vue.http.interceptors, boardsMockInterceptor);
+ mock.reset();
});
it('renders component', () => {
diff --git a/spec/javascripts/boards/board_new_issue_spec.js b/spec/javascripts/boards/board_new_issue_spec.js
index 02e6692dda8..c62c537841c 100644
--- a/spec/javascripts/boards/board_new_issue_spec.js
+++ b/spec/javascripts/boards/board_new_issue_spec.js
@@ -1,24 +1,22 @@
-/* global boardsMockInterceptor */
/* global BoardService */
/* global List */
-/* global listObj */
-/* global mockBoardService */
import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import boardNewIssue from '~/boards/components/board_new_issue';
import '~/boards/models/list';
-import './mock_data';
+import { listObj, boardsMockInterceptor, mockBoardService } from './mock_data';
describe('Issue boards new issue form', () => {
let vm;
let list;
+ let mock;
let newIssueMock;
const promiseReturn = {
- json() {
- return {
- iid: 100,
- };
+ data: {
+ iid: 100,
},
};
@@ -35,7 +33,9 @@ describe('Issue boards new issue form', () => {
const BoardNewIssueComp = Vue.extend(boardNewIssue);
- Vue.http.interceptors.push(boardsMockInterceptor);
+ mock = new MockAdapter(axios);
+ mock.onAny().reply(boardsMockInterceptor);
+
gl.boardService = mockBoardService();
gl.issueBoards.BoardsStore.create();
gl.IssueBoardsApp = new Vue();
@@ -56,7 +56,10 @@ describe('Issue boards new issue form', () => {
.catch(done.fail);
});
- afterEach(() => vm.$destroy());
+ afterEach(() => {
+ vm.$destroy();
+ mock.reset();
+ });
it('calls submit if submit button is clicked', (done) => {
spyOn(vm, 'submit').and.callFake(e => e.preventDefault());
diff --git a/spec/javascripts/boards/boards_store_spec.js b/spec/javascripts/boards/boards_store_spec.js
index 0e656858182..49fb20f4c84 100644
--- a/spec/javascripts/boards/boards_store_spec.js
+++ b/spec/javascripts/boards/boards_store_spec.js
@@ -1,12 +1,10 @@
/* eslint-disable comma-dangle, one-var, no-unused-vars */
/* global BoardService */
-/* global boardsMockInterceptor */
-/* global listObj */
-/* global listObjDuplicate */
/* global ListIssue */
-/* global mockBoardService */
import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import Cookies from 'js-cookie';
import '~/boards/models/issue';
@@ -15,11 +13,14 @@ import '~/boards/models/list';
import '~/boards/models/assignee';
import '~/boards/services/board_service';
import '~/boards/stores/boards_store';
-import './mock_data';
+import { listObj, listObjDuplicate, boardsMockInterceptor, mockBoardService } from './mock_data';
describe('Store', () => {
+ let mock;
+
beforeEach(() => {
- Vue.http.interceptors.push(boardsMockInterceptor);
+ mock = new MockAdapter(axios);
+ mock.onAny().reply(boardsMockInterceptor);
gl.boardService = mockBoardService();
gl.issueBoards.BoardsStore.create();
@@ -34,7 +35,7 @@ describe('Store', () => {
});
afterEach(() => {
- Vue.http.interceptors = _.without(Vue.http.interceptors, boardsMockInterceptor);
+ mock.reset();
});
it('starts with a blank state', () => {
diff --git a/spec/javascripts/boards/components/board_spec.js b/spec/javascripts/boards/components/board_spec.js
index 8dacac20cad..19346e305cf 100644
--- a/spec/javascripts/boards/components/board_spec.js
+++ b/spec/javascripts/boards/components/board_spec.js
@@ -1,9 +1,8 @@
-/* global mockBoardService */
import Vue from 'vue';
import '~/boards/services/board_service';
import '~/boards/components/board';
import '~/boards/models/list';
-import '../mock_data';
+import { mockBoardService } from '../mock_data';
describe('Board component', () => {
let vm;
diff --git a/spec/javascripts/boards/issue_card_spec.js b/spec/javascripts/boards/issue_card_spec.js
index 7d430ec35e2..8ef221257be 100644
--- a/spec/javascripts/boards/issue_card_spec.js
+++ b/spec/javascripts/boards/issue_card_spec.js
@@ -1,6 +1,5 @@
/* global ListAssignee */
/* global ListLabel */
-/* global listObj */
/* global ListIssue */
import Vue from 'vue';
@@ -11,7 +10,7 @@ import '~/boards/models/list';
import '~/boards/models/assignee';
import '~/boards/stores/boards_store';
import '~/boards/components/issue_card_inner';
-import './mock_data';
+import { listObj } from './mock_data';
describe('Issue card component', () => {
const user = new ListAssignee({
diff --git a/spec/javascripts/boards/issue_spec.js b/spec/javascripts/boards/issue_spec.js
index 41dcb19df3c..dbbe14fe3e0 100644
--- a/spec/javascripts/boards/issue_spec.js
+++ b/spec/javascripts/boards/issue_spec.js
@@ -1,7 +1,6 @@
/* eslint-disable comma-dangle */
/* global BoardService */
/* global ListIssue */
-/* global mockBoardService */
import Vue from 'vue';
import '~/boards/models/issue';
@@ -10,7 +9,7 @@ import '~/boards/models/list';
import '~/boards/models/assignee';
import '~/boards/services/board_service';
import '~/boards/stores/boards_store';
-import './mock_data';
+import { mockBoardService } from './mock_data';
describe('Issue model', () => {
let issue;
diff --git a/spec/javascripts/boards/list_spec.js b/spec/javascripts/boards/list_spec.js
index eead396ca7e..645ce831b53 100644
--- a/spec/javascripts/boards/list_spec.js
+++ b/spec/javascripts/boards/list_spec.js
@@ -1,13 +1,10 @@
/* eslint-disable comma-dangle */
-/* global boardsMockInterceptor */
/* global BoardService */
-/* global mockBoardService */
/* global List */
/* global ListIssue */
-/* global listObj */
-/* global listObjDuplicate */
-import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import '~/boards/models/issue';
import '~/boards/models/label';
@@ -15,13 +12,15 @@ import '~/boards/models/list';
import '~/boards/models/assignee';
import '~/boards/services/board_service';
import '~/boards/stores/boards_store';
-import './mock_data';
+import { listObj, listObjDuplicate, boardsMockInterceptor, mockBoardService } from './mock_data';
describe('List model', () => {
let list;
+ let mock;
beforeEach(() => {
- Vue.http.interceptors.push(boardsMockInterceptor);
+ mock = new MockAdapter(axios);
+ mock.onAny().reply(boardsMockInterceptor);
gl.boardService = mockBoardService({
bulkUpdatePath: '/test/issue-boards/board/1/lists',
});
@@ -31,7 +30,7 @@ describe('List model', () => {
});
afterEach(() => {
- Vue.http.interceptors = _.without(Vue.http.interceptors, boardsMockInterceptor);
+ mock.reset();
});
it('gets issues when created', (done) => {
@@ -158,10 +157,8 @@ describe('List model', () => {
describe('newIssue', () => {
beforeEach(() => {
spyOn(gl.boardService, 'newIssue').and.returnValue(Promise.resolve({
- json() {
- return {
- id: 42,
- };
+ data: {
+ id: 42,
},
}));
});
diff --git a/spec/javascripts/boards/mock_data.js b/spec/javascripts/boards/mock_data.js
index 0a93086985e..9ae2d535398 100644
--- a/spec/javascripts/boards/mock_data.js
+++ b/spec/javascripts/boards/mock_data.js
@@ -1,20 +1,20 @@
/* global BoardService */
/* eslint-disable comma-dangle, no-unused-vars, quote-props */
-const listObj = {
- id: _.random(10000),
+export const listObj = {
+ id: 300,
position: 0,
title: 'Test',
list_type: 'label',
label: {
- id: _.random(10000),
+ id: 5000,
title: 'Testing',
color: 'red',
description: 'testing;'
}
};
-const listObjDuplicate = {
+export const listObjDuplicate = {
id: listObj.id,
position: 1,
title: 'Test',
@@ -27,9 +27,9 @@ const listObjDuplicate = {
}
};
-const BoardsMockData = {
+export const BoardsMockData = {
'GET': {
- '/test/boards/1{/id}/issues': {
+ '/test/-/boards/1/lists/300/issues?id=300&page=1&=': {
issues: [{
title: 'Testing',
id: 1,
@@ -41,7 +41,7 @@ const BoardsMockData = {
}
},
'POST': {
- '/test/boards/1{/id}': listObj
+ '/test/-/boards/1/lists': listObj
},
'PUT': {
'/test/issue-boards/board/1/lists{/id}': {}
@@ -51,17 +51,14 @@ const BoardsMockData = {
}
};
-const boardsMockInterceptor = (request, next) => {
- const body = BoardsMockData[request.method][request.url];
-
- next(request.respondWith(JSON.stringify(body), {
- status: 200
- }));
+export const boardsMockInterceptor = (config) => {
+ const body = BoardsMockData[config.method.toUpperCase()][config.url];
+ return [200, body];
};
-const mockBoardService = (opts = {}) => {
- const boardsEndpoint = opts.boardsEndpoint || '/test/issue-boards/board';
- const listsEndpoint = opts.listsEndpoint || '/test/boards/1';
+export const mockBoardService = (opts = {}) => {
+ const boardsEndpoint = opts.boardsEndpoint || '/test/issue-boards/boards.json';
+ const listsEndpoint = opts.listsEndpoint || '/test/-/boards/1/lists';
const bulkUpdatePath = opts.bulkUpdatePath || '';
const boardId = opts.boardId || '1';
@@ -72,9 +69,3 @@ const mockBoardService = (opts = {}) => {
boardId,
});
};
-
-window.listObj = listObj;
-window.listObjDuplicate = listObjDuplicate;
-window.BoardsMockData = BoardsMockData;
-window.boardsMockInterceptor = boardsMockInterceptor;
-window.mockBoardService = mockBoardService;
diff --git a/spec/javascripts/filtered_search/filtered_search_manager_spec.js b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
index 5111632d681..b8890e4cda1 100644
--- a/spec/javascripts/filtered_search/filtered_search_manager_spec.js
+++ b/spec/javascripts/filtered_search/filtered_search_manager_spec.js
@@ -252,6 +252,7 @@ describe('Filtered Search Manager', () => {
it('removes last token', () => {
spyOn(gl.FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough();
dispatchBackspaceEvent(input, 'keyup');
+ dispatchBackspaceEvent(input, 'keyup');
expect(gl.FilteredSearchVisualTokens.removeLastTokenPartial).toHaveBeenCalled();
});
@@ -259,6 +260,7 @@ describe('Filtered Search Manager', () => {
it('sets the input', () => {
spyOn(gl.FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough();
dispatchDeleteEvent(input, 'keyup');
+ dispatchDeleteEvent(input, 'keyup');
expect(gl.FilteredSearchVisualTokens.getLastTokenPartial).toHaveBeenCalled();
expect(input.value).toEqual('~bug');
@@ -276,6 +278,18 @@ describe('Filtered Search Manager', () => {
expect(gl.FilteredSearchVisualTokens.getLastTokenPartial).not.toHaveBeenCalled();
expect(input.value).toEqual('text');
});
+
+ it('does not remove previous token on single backspace press', () => {
+ spyOn(gl.FilteredSearchVisualTokens, 'removeLastTokenPartial').and.callThrough();
+ spyOn(gl.FilteredSearchVisualTokens, 'getLastTokenPartial').and.callThrough();
+
+ input.value = 't';
+ dispatchDeleteEvent(input, 'keyup');
+
+ expect(gl.FilteredSearchVisualTokens.removeLastTokenPartial).not.toHaveBeenCalled();
+ expect(gl.FilteredSearchVisualTokens.getLastTokenPartial).not.toHaveBeenCalled();
+ expect(input.value).toEqual('t');
+ });
});
describe('removeToken', () => {
diff --git a/spec/javascripts/issue_show/components/app_spec.js b/spec/javascripts/issue_show/components/app_spec.js
index 7159148f8fa..1454ca52018 100644
--- a/spec/javascripts/issue_show/components/app_spec.js
+++ b/spec/javascripts/issue_show/components/app_spec.js
@@ -1,4 +1,6 @@
import Vue from 'vue';
+import MockAdapter from 'axios-mock-adapter';
+import axios from '~/lib/utils/axios_utils';
import '~/render_math';
import '~/render_gfm';
import * as urlUtils from '~/lib/utils/url_utility';
@@ -11,26 +13,29 @@ function formatText(text) {
return text.trim().replace(/\s\s+/g, ' ');
}
+const REALTIME_REQUEST_STACK = [
+ issueShowData.initialRequest,
+ issueShowData.secondRequest,
+];
+
describe('Issuable output', () => {
- let requestData = issueShowData.initialRequest;
+ let mock;
+ let realtimeRequestCount = 0;
+ let vm;
document.body.innerHTML = '<span id="task_status"></span>';
- const interceptor = (request, next) => {
- next(request.respondWith(JSON.stringify(requestData), {
- status: 200,
- }));
- };
-
- let vm;
-
beforeEach((done) => {
spyOn(eventHub, '$emit');
const IssuableDescriptionComponent = Vue.extend(issuableApp);
- requestData = issueShowData.initialRequest;
- Vue.http.interceptors.push(interceptor);
+ mock = new MockAdapter(axios);
+ mock.onGet('/gitlab-org/gitlab-shell/issues/9/realtime_changes/realtime_changes').reply(() => {
+ const res = Promise.resolve([200, REALTIME_REQUEST_STACK[realtimeRequestCount]]);
+ realtimeRequestCount += 1;
+ return res;
+ });
vm = new IssuableDescriptionComponent({
propsData: {
@@ -54,10 +59,10 @@ describe('Issuable output', () => {
});
afterEach(() => {
- Vue.http.interceptors = _.without(Vue.http.interceptors, interceptor);
+ mock.reset();
+ realtimeRequestCount = 0;
vm.poll.stop();
-
vm.$destroy();
});
@@ -77,7 +82,6 @@ describe('Issuable output', () => {
expect(editedText.querySelector('time')).toBeTruthy();
})
.then(() => {
- requestData = issueShowData.secondRequest;
vm.poll.makeRequest();
})
.then(() => new Promise(resolve => setTimeout(resolve)))
@@ -141,24 +145,19 @@ describe('Issuable output', () => {
spyOn(vm.service, 'getData').and.callThrough();
spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve) => {
resolve({
- json() {
- return {
- confidential: false,
- web_url: location.pathname,
- };
+ data: {
+ confidential: false,
+ web_url: location.pathname,
},
});
}));
- vm.updateIssuable();
-
- setTimeout(() => {
- expect(
- vm.service.getData,
- ).toHaveBeenCalled();
-
- done();
- });
+ vm.updateIssuable()
+ .then(() => {
+ expect(vm.service.getData).toHaveBeenCalled();
+ })
+ .then(done)
+ .catch(done.fail);
});
it('correctly updates issuable data', (done) => {
@@ -166,29 +165,22 @@ describe('Issuable output', () => {
resolve();
}));
- vm.updateIssuable();
-
- setTimeout(() => {
- expect(
- vm.service.updateIssuable,
- ).toHaveBeenCalledWith(vm.formState);
- expect(
- eventHub.$emit,
- ).toHaveBeenCalledWith('close.form');
-
- done();
- });
+ vm.updateIssuable()
+ .then(() => {
+ expect(vm.service.updateIssuable).toHaveBeenCalledWith(vm.formState);
+ expect(eventHub.$emit).toHaveBeenCalledWith('close.form');
+ })
+ .then(done)
+ .catch(done.fail);
});
it('does not redirect if issue has not moved', (done) => {
spyOn(urlUtils, 'visitUrl');
spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve) => {
resolve({
- json() {
- return {
- web_url: location.pathname,
- confidential: vm.isConfidential,
- };
+ data: {
+ web_url: location.pathname,
+ confidential: vm.isConfidential,
},
});
}));
@@ -208,11 +200,9 @@ describe('Issuable output', () => {
spyOn(urlUtils, 'visitUrl');
spyOn(vm.service, 'updateIssuable').and.callFake(() => new Promise((resolve) => {
resolve({
- json() {
- return {
- web_url: '/testing-issue-move',
- confidential: vm.isConfidential,
- };
+ data: {
+ web_url: '/testing-issue-move',
+ confidential: vm.isConfidential,
},
});
}));
@@ -283,10 +273,8 @@ describe('Issuable output', () => {
let modal;
const promise = new Promise((resolve) => {
resolve({
- json() {
- return {
- recaptcha_html: '<div class="g-recaptcha">recaptcha_html</div>',
- };
+ data: {
+ recaptcha_html: '<div class="g-recaptcha">recaptcha_html</div>',
},
});
});
@@ -323,8 +311,8 @@ describe('Issuable output', () => {
spyOn(urlUtils, 'visitUrl');
spyOn(vm.service, 'deleteIssuable').and.callFake(() => new Promise((resolve) => {
resolve({
- json() {
- return { web_url: '/test' };
+ data: {
+ web_url: '/test',
},
});
}));
@@ -345,8 +333,8 @@ describe('Issuable output', () => {
spyOn(vm.poll, 'stop').and.callThrough();
spyOn(vm.service, 'deleteIssuable').and.callFake(() => new Promise((resolve) => {
resolve({
- json() {
- return { web_url: '/test' };
+ data: {
+ web_url: '/test',
},
});
}));
@@ -385,22 +373,21 @@ describe('Issuable output', () => {
describe('open form', () => {
it('shows locked warning if form is open & data is different', (done) => {
- Vue.nextTick()
+ vm.$nextTick()
.then(() => {
vm.openForm();
- requestData = issueShowData.secondRequest;
vm.poll.makeRequest();
})
- .then(() => new Promise(resolve => setTimeout(resolve)))
+ // Wait for the request
+ .then(vm.$nextTick)
+ // Wait for the successCallback to update the store state
+ .then(vm.$nextTick)
+ // Wait for the new state to flow to the Vue components
+ .then(vm.$nextTick)
.then(() => {
- expect(
- vm.formState.lockedWarningVisible,
- ).toBeTruthy();
-
- expect(
- vm.$el.querySelector('.alert'),
- ).not.toBeNull();
+ expect(vm.formState.lockedWarningVisible).toEqual(true);
+ expect(vm.$el.querySelector('.alert')).not.toBeNull();
})
.then(done)
.catch(done.fail);
diff --git a/spec/javascripts/issue_show/mock_data.js b/spec/javascripts/issue_show/mock_data.js
index eb3111412a7..74b3efb014b 100644
--- a/spec/javascripts/issue_show/mock_data.js
+++ b/spec/javascripts/issue_show/mock_data.js
@@ -19,14 +19,4 @@ export default {
updated_by_name: 'Other User',
updated_by_path: '/other_user',
},
- issueSpecRequest: {
- title: '<p>this is a title</p>',
- title_text: 'this is a title',
- description: '<li class="task-list-item enabled"><input type="checkbox" class="task-list-item-checkbox">Task List Item</li>',
- description_text: '- [ ] Task List Item',
- task_status: '0 of 1 completed',
- updated_at: '2017-05-15T12:31:04.428Z',
- updated_by_name: 'Last User',
- updated_by_path: '/last_user',
- },
};
diff --git a/spec/javascripts/job_spec.js b/spec/javascripts/job_spec.js
index 4f06237deb5..b740c9ed893 100644
--- a/spec/javascripts/job_spec.js
+++ b/spec/javascripts/job_spec.js
@@ -1,4 +1,4 @@
-import { bytesToKiB } from '~/lib/utils/number_utils';
+import { numberToHumanSize } from '~/lib/utils/number_utils';
import * as urlUtils from '~/lib/utils/url_utility';
import '~/lib/utils/datetime_utility';
import Job from '~/job';
@@ -169,7 +169,7 @@ describe('Job', () => {
expect(
document.querySelector('.js-truncated-info-size').textContent.trim(),
- ).toEqual(`${bytesToKiB(size)}`);
+ ).toEqual(`${numberToHumanSize(size)}`);
});
it('shows incremented size', () => {
@@ -195,7 +195,7 @@ describe('Job', () => {
expect(
document.querySelector('.js-truncated-info-size').textContent.trim(),
- ).toEqual(`${bytesToKiB(50)}`);
+ ).toEqual(`${numberToHumanSize(50)}`);
jasmine.clock().tick(4001);
@@ -209,7 +209,7 @@ describe('Job', () => {
expect(
document.querySelector('.js-truncated-info-size').textContent.trim(),
- ).toEqual(`${bytesToKiB(60)}`);
+ ).toEqual(`${numberToHumanSize(60)}`);
});
it('renders the raw link', () => {
diff --git a/spec/javascripts/repo/components/repo_file_spec.js b/spec/javascripts/repo/components/repo_file_spec.js
index e8b370f97b4..0810da87e80 100644
--- a/spec/javascripts/repo/components/repo_file_spec.js
+++ b/spec/javascripts/repo/components/repo_file_spec.js
@@ -32,13 +32,9 @@ describe('RepoFile', () => {
vm.$mount();
const name = vm.$el.querySelector('.repo-file-name');
- const fileIcon = vm.$el.querySelector('.file-icon');
- expect(vm.$el.querySelector(`.${vm.file.icon}`).style.marginLeft).toEqual('0px');
expect(name.href).toMatch('');
expect(name.textContent.trim()).toEqual(vm.file.name);
- expect(fileIcon.classList.contains(vm.file.icon)).toBeTruthy();
- expect(fileIcon.style.marginLeft).toEqual(`${vm.file.level * 10}px`);
});
it('does render if hasFiles is true and is loading tree', () => {
@@ -49,17 +45,6 @@ describe('RepoFile', () => {
expect(vm.$el.querySelector('.fa-spin.fa-spinner')).toBeFalsy();
});
- it('renders a spinner if the file is loading', () => {
- const f = file();
- f.loading = true;
- vm = createComponent({
- file: f,
- });
-
- expect(vm.$el.querySelector('.fa-spin.fa-spinner')).not.toBeNull();
- expect(vm.$el.querySelector('.fa-spin.fa-spinner').style.marginLeft).toEqual(`${vm.file.level * 16}px`);
- });
-
it('does not render commit message and datetime if mini', (done) => {
vm = createComponent({
file: file(),
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js
index db7d083065b..6a59dc3c87e 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_deployment_spec.js
@@ -95,10 +95,8 @@ describe('MRWidgetDeployment', () => {
const url = '/foo/bar';
const returnPromise = () => new Promise((resolve) => {
resolve({
- json() {
- return {
- redirect_url: url,
- };
+ data: {
+ redirect_url: url,
},
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js b/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js
index 2ae3adc1f93..07ed7f7f532 100644
--- a/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/mr_widget_memory_usage_spec.js
@@ -155,9 +155,7 @@ describe('MemoryUsage', () => {
describe('loadMetrics', () => {
const returnServicePromise = () => new Promise((resolve) => {
resolve({
- json() {
- return metricsMockData;
- },
+ data: metricsMockData,
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js
index d23b558f4ea..1bf97bbf093 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_closed_spec.js
@@ -4,13 +4,16 @@ import closedComponent from '~/vue_merge_request_widget/components/states/mr_wid
const mr = {
targetBranch: 'good-branch',
targetBranchPath: '/good-branch',
- closedEvent: {
- author: {
+ metrics: {
+ mergedBy: {},
+ mergedAt: 'mergedUpdatedAt',
+ closedBy: {
name: 'Fatih Acet',
username: 'fatihacet',
},
- updatedAt: 'closedEventUpdatedAt',
- formattedUpdatedAt: '',
+ closedAt: 'closedEventUpdatedAt',
+ readableMergedAt: '',
+ readableClosedAt: '',
},
updatedAt: 'mrUpdatedAt',
closedAt: '1 day ago',
@@ -56,7 +59,7 @@ describe('MRWidgetClosed', () => {
it('should have correct elements', () => {
expect(el.querySelector('h4').textContent).toContain('Closed by');
- expect(el.querySelector('h4').textContent).toContain(mr.closedEvent.author.name);
+ expect(el.querySelector('h4').textContent).toContain(mr.metrics.closedBy.name);
expect(el.textContent).toContain('The changes were not merged into');
expect(el.querySelector('.label-branch').getAttribute('href')).toEqual(mr.targetBranchPath);
expect(el.querySelector('.label-branch').textContent).toContain(mr.targetBranch);
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js
index 9a71d0b47d7..5f4df15bcd6 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merge_when_pipeline_succeeds_spec.js
@@ -108,9 +108,7 @@ describe('MRWidgetMergeWhenPipelineSucceeds', () => {
spyOn(eventHub, '$emit');
spyOn(vm.service, 'cancelAutomaticMerge').and.returnValue(new Promise((resolve) => {
resolve({
- json() {
- return mrObj;
- },
+ data: mrObj,
});
}));
@@ -129,10 +127,8 @@ describe('MRWidgetMergeWhenPipelineSucceeds', () => {
spyOn(eventHub, '$emit');
spyOn(vm.service.mergeResource, 'save').and.returnValue(new Promise((resolve) => {
resolve({
- json() {
- return {
- status: 'merge_when_pipeline_succeeds',
- };
+ data: {
+ status: 'merge_when_pipeline_succeeds',
},
});
}));
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js
index 2714e8294fa..2dc3b72ea40 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_merged_spec.js
@@ -14,10 +14,13 @@ const createComponent = () => {
canRevertInCurrentMR: true,
canRemoveSourceBranch: true,
sourceBranchRemoved: true,
- mergedEvent: {
- author: {},
- updatedAt: 'mergedUpdatedAt',
- formattedUpdatedAt: '',
+ metrics: {
+ mergedBy: {},
+ mergedAt: 'mergedUpdatedAt',
+ readableMergedAt: '',
+ closedBy: {},
+ closedAt: 'mergedUpdatedAt',
+ readableClosedAt: '',
},
updatedAt: 'mrUpdatedAt',
targetBranch,
@@ -111,10 +114,8 @@ describe('MRWidgetMerged', () => {
spyOn(eventHub, '$emit');
spyOn(vm.service, 'removeSourceBranch').and.returnValue(new Promise((resolve) => {
resolve({
- json() {
- return {
- message: 'Branch was removed',
- };
+ data: {
+ message: 'Branch was removed',
},
});
}));
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
index df3d29ee1f9..1127576617b 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_ready_to_merge_spec.js
@@ -292,8 +292,8 @@ describe('MRWidgetReadyToMerge', () => {
describe('handleMergeButtonClick', () => {
const returnPromise = status => new Promise((resolve) => {
resolve({
- json() {
- return { status };
+ data: {
+ status,
},
});
});
@@ -364,8 +364,9 @@ describe('MRWidgetReadyToMerge', () => {
describe('handleMergePolling', () => {
const returnPromise = state => new Promise((resolve) => {
resolve({
- json() {
- return { state, source_branch_exists: true };
+ data: {
+ state,
+ source_branch_exists: true,
},
});
});
@@ -422,8 +423,8 @@ describe('MRWidgetReadyToMerge', () => {
describe('handleRemoveBranchPolling', () => {
const returnPromise = state => new Promise((resolve) => {
resolve({
- json() {
- return { source_branch_exists: state };
+ data: {
+ source_branch_exists: state,
},
});
});
diff --git a/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js b/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js
index 2cb3aaa6951..98ab61a0367 100644
--- a/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js
+++ b/spec/javascripts/vue_mr_widget/components/states/mr_widget_wip_spec.js
@@ -50,9 +50,7 @@ describe('MRWidgetWIP', () => {
spyOn(eventHub, '$emit');
spyOn(vm.service, 'removeWIP').and.returnValue(new Promise((resolve) => {
resolve({
- json() {
- return mrObj;
- },
+ data: mrObj,
});
}));
diff --git a/spec/javascripts/vue_mr_widget/mock_data.js b/spec/javascripts/vue_mr_widget/mock_data.js
index 1ad7c2d8efa..ca29c9fee32 100644
--- a/spec/javascripts/vue_mr_widget/mock_data.js
+++ b/spec/javascripts/vue_mr_widget/mock_data.js
@@ -33,8 +33,8 @@ export default {
"source_project_id": 19,
"target_branch": "master",
"target_project_id": 19,
- "merge_event": {
- "author": {
+ "metrics": {
+ "merged_by": {
"name": "Administrator",
"username": "root",
"id": 1,
@@ -42,9 +42,10 @@ export default {
"avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon",
"web_url": "http://localhost:3000/root"
},
- "updated_at": "2017-04-07T15:39:25.696Z"
+ "merged_at": "2017-04-07T15:39:25.696Z",
+ "closed_by": null,
+ "closed_at": null
},
- "closed_event": null,
"author": {
"name": "Administrator",
"username": "root",
diff --git a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
index 74b343c573e..cd00d0a39a3 100644
--- a/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/javascripts/vue_mr_widget/mr_widget_options_spec.js
@@ -8,10 +8,7 @@ import mountComponent from '../helpers/vue_mount_component_helper';
const returnPromise = data => new Promise((resolve) => {
resolve({
- json() {
- return data;
- },
- body: data,
+ data,
});
});
diff --git a/spec/javascripts/vue_shared/components/file_icon_spec.js b/spec/javascripts/vue_shared/components/file_icon_spec.js
new file mode 100644
index 00000000000..d99b17bdc79
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/file_icon_spec.js
@@ -0,0 +1,83 @@
+import Vue from 'vue';
+import fileIcon from '~/vue_shared/components/file_icon.vue';
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+describe('File Icon component', () => {
+ let vm;
+ let FileIcon;
+
+ beforeEach(() => {
+ FileIcon = Vue.extend(fileIcon);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should render a span element with an svg', () => {
+ vm = mountComponent(FileIcon, {
+ fileName: 'test.js',
+ });
+
+ expect(vm.$el.tagName).toEqual('SPAN');
+ expect(vm.$el.querySelector('span > svg')).toBeDefined();
+ });
+
+ it('should render a javascript icon based on file ending', () => {
+ vm = mountComponent(FileIcon, {
+ fileName: 'test.js',
+ });
+
+ expect(vm.$el.firstChild.firstChild.getAttribute('xlink:href')).toBe(`${gon.sprite_file_icons}#javascript`);
+ });
+
+ it('should render a image icon based on file ending', () => {
+ vm = mountComponent(FileIcon, {
+ fileName: 'test.png',
+ });
+
+ expect(vm.$el.firstChild.firstChild.getAttribute('xlink:href')).toBe(`${gon.sprite_file_icons}#image`);
+ });
+
+ it('should render a webpack icon based on file namer', () => {
+ vm = mountComponent(FileIcon, {
+ fileName: 'webpack.js',
+ });
+
+ expect(vm.$el.firstChild.firstChild.getAttribute('xlink:href')).toBe(`${gon.sprite_file_icons}#webpack`);
+ });
+
+ it('should render a standard folder icon', () => {
+ vm = mountComponent(FileIcon, {
+ fileName: 'js',
+ folder: true,
+ });
+
+ expect(vm.$el.querySelector('span > svg > use').getAttribute('xlink:href')).toBe(`${gon.sprite_file_icons}#folder`);
+ });
+
+ it('should render a loading icon', () => {
+ vm = mountComponent(FileIcon, {
+ fileName: 'test.js',
+ loading: true,
+ });
+
+ expect(
+ vm.$el.querySelector('i').getAttribute('class'),
+ ).toEqual('fa fa-spin fa-spinner fa-1x');
+ });
+
+ it('should add a special class and a size class', () => {
+ vm = mountComponent(FileIcon, {
+ fileName: 'test.js',
+ cssClasses: 'extraclasses',
+ size: 120,
+ });
+
+ const classList = vm.$el.firstChild.classList;
+ const containsSizeClass = classList.contains('s120');
+ const containsCustomClass = classList.contains('extraclasses');
+ expect(containsSizeClass).toBe(true);
+ expect(containsCustomClass).toBe(true);
+ });
+});
diff --git a/spec/javascripts/vue_shared/components/panel_resizer_spec.js b/spec/javascripts/vue_shared/components/panel_resizer_spec.js
new file mode 100644
index 00000000000..70ce3dffaba
--- /dev/null
+++ b/spec/javascripts/vue_shared/components/panel_resizer_spec.js
@@ -0,0 +1,59 @@
+import Vue from 'vue';
+import panelResizer from '~/vue_shared/components/panel_resizer.vue';
+import mountComponent from '../../helpers/vue_mount_component_helper';
+
+describe('Panel Resizer component', () => {
+ let vm;
+ let PanelResizer;
+
+ const triggerEvent = (eventName, el = vm.$el, clientX = 0) => {
+ const event = document.createEvent('MouseEvents');
+ event.initMouseEvent(eventName, true, true, window, 1, clientX, 0, clientX, 0, false, false,
+ false, false, 0, null);
+
+ el.dispatchEvent(event);
+ };
+
+ beforeEach(() => {
+ PanelResizer = Vue.extend(panelResizer);
+ });
+
+ afterEach(() => {
+ vm.$destroy();
+ });
+
+ it('should render a div element with the correct classes and styles', () => {
+ vm = mountComponent(PanelResizer, {
+ startSize: 100,
+ side: 'left',
+ });
+
+ expect(vm.$el.tagName).toEqual('DIV');
+ expect(vm.$el.getAttribute('class')).toBe('dragHandle dragleft');
+ expect(vm.$el.getAttribute('style')).toBe('cursor: ew-resize;');
+ });
+
+ it('should render a div element with the correct classes for a right side panel', () => {
+ vm = mountComponent(PanelResizer, {
+ startSize: 100,
+ side: 'right',
+ });
+
+ expect(vm.$el.tagName).toEqual('DIV');
+ expect(vm.$el.getAttribute('class')).toBe('dragHandle dragright');
+ });
+
+ it('drags the resizer', () => {
+ vm = mountComponent(PanelResizer, {
+ startSize: 100,
+ side: 'left',
+ });
+
+ spyOn(vm, '$emit');
+ triggerEvent('mousedown', vm.$el);
+ triggerEvent('mousemove', document);
+ triggerEvent('mouseup', document);
+ expect(vm.$emit.calls.allArgs()).toEqual([['resize-start', 100], ['update:size', 100], ['resize-end', 100]]);
+ expect(vm.size).toBe(100);
+ });
+});
diff --git a/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb b/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb
new file mode 100644
index 00000000000..dfe3b31f1c0
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/populate_merge_request_metrics_with_events_data_spec.rb
@@ -0,0 +1,124 @@
+require 'rails_helper'
+
+describe Gitlab::BackgroundMigration::PopulateMergeRequestMetricsWithEventsData, :migration, schema: 20171128214150 do
+ describe '#perform' do
+ let(:mr_with_event) { create(:merge_request) }
+ let!(:merged_event) { create(:event, :merged, target: mr_with_event) }
+ let!(:closed_event) { create(:event, :closed, target: mr_with_event) }
+
+ before do
+ # Make sure no metrics are created and kept through after_* callbacks.
+ mr_with_event.metrics.destroy!
+ end
+
+ it 'inserts metrics and updates closed and merged events' do
+ subject.perform(mr_with_event.id, mr_with_event.id)
+
+ mr_with_event.reload
+
+ expect(mr_with_event.metrics).to have_attributes(latest_closed_by_id: closed_event.author_id,
+ merged_by_id: merged_event.author_id)
+ expect(mr_with_event.metrics.latest_closed_at.to_s).to eq(closed_event.updated_at.to_s)
+ end
+ end
+
+ describe '#insert_metrics_for_range' do
+ let!(:mrs_without_metrics) { create_list(:merge_request, 3) }
+ let!(:mrs_with_metrics) { create_list(:merge_request, 2) }
+
+ before do
+ # Make sure no metrics are created and kept through after_* callbacks.
+ mrs_without_metrics.each { |m| m.metrics.destroy! }
+ end
+
+ it 'inserts merge_request_metrics for merge_requests without one' do
+ expect { subject.insert_metrics_for_range(MergeRequest.first.id, MergeRequest.last.id) }
+ .to change(MergeRequest::Metrics, :count).from(2).to(5)
+
+ mrs_without_metrics.each do |mr_without_metrics|
+ expect(mr_without_metrics.reload.metrics).to be_present
+ end
+ end
+
+ it 'does not insert merge_request_metrics for MRs out of given range' do
+ expect { subject.insert_metrics_for_range(mrs_with_metrics.first.id, mrs_with_metrics.last.id) }
+ .not_to change(MergeRequest::Metrics, :count).from(2)
+ end
+ end
+
+ describe '#update_metrics_with_events_data' do
+ context 'closed events data update' do
+ let(:users) { create_list(:user, 3) }
+ let(:mrs_with_event) { create_list(:merge_request, 3) }
+
+ before do
+ create_list(:event, 2, :closed, author: users.first, target: mrs_with_event.first)
+ create_list(:event, 3, :closed, author: users.second, target: mrs_with_event.second)
+ create(:event, :closed, author: users.third, target: mrs_with_event.third)
+ end
+
+ it 'migrates multiple MR metrics with closed event data' do
+ mr_without_event = create(:merge_request)
+ create(:event, :merged)
+
+ subject.update_metrics_with_events_data(mrs_with_event.first.id, mrs_with_event.last.id)
+
+ mrs_with_event.each do |mr_with_event|
+ latest_event = Event.where(action: 3, target: mr_with_event).last
+
+ mr_with_event.metrics.reload
+
+ expect(mr_with_event.metrics.latest_closed_by).to eq(latest_event.author)
+ expect(mr_with_event.metrics.latest_closed_at.to_s).to eq(latest_event.updated_at.to_s)
+ end
+
+ expect(mr_without_event.metrics.reload).to have_attributes(latest_closed_by_id: nil,
+ latest_closed_at: nil)
+ end
+
+ it 'does not update metrics out of given range' do
+ out_of_range_mr = create(:merge_request)
+ create(:event, :closed, author: users.last, target: out_of_range_mr)
+
+ expect { subject.perform(mrs_with_event.first.id, mrs_with_event.second.id) }
+ .not_to change { out_of_range_mr.metrics.reload.merged_by }
+ .from(nil)
+ end
+ end
+
+ context 'merged events data update' do
+ let(:users) { create_list(:user, 3) }
+ let(:mrs_with_event) { create_list(:merge_request, 3) }
+
+ before do
+ create_list(:event, 2, :merged, author: users.first, target: mrs_with_event.first)
+ create_list(:event, 3, :merged, author: users.second, target: mrs_with_event.second)
+ create(:event, :merged, author: users.third, target: mrs_with_event.third)
+ end
+
+ it 'migrates multiple MR metrics with merged event data' do
+ mr_without_event = create(:merge_request)
+ create(:event, :merged)
+
+ subject.update_metrics_with_events_data(mrs_with_event.first.id, mrs_with_event.last.id)
+
+ mrs_with_event.each do |mr_with_event|
+ latest_event = Event.where(action: Event::MERGED, target: mr_with_event).last
+
+ expect(mr_with_event.metrics.reload.merged_by).to eq(latest_event.author)
+ end
+
+ expect(mr_without_event.metrics.reload).to have_attributes(merged_by_id: nil)
+ end
+
+ it 'does not update metrics out of given range' do
+ out_of_range_mr = create(:merge_request)
+ create(:event, :merged, author: users.last, target: out_of_range_mr)
+
+ expect { subject.perform(mrs_with_event.first.id, mrs_with_event.second.id) }
+ .not_to change { out_of_range_mr.metrics.reload.merged_by }
+ .from(nil)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bare_repository_import/importer_spec.rb b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
index 8a83e446935..b5d86df09d2 100644
--- a/spec/lib/gitlab/bare_repository_import/importer_spec.rb
+++ b/spec/lib/gitlab/bare_repository_import/importer_spec.rb
@@ -68,8 +68,14 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
expect(Project.find_by_full_path(project_path)).not_to be_nil
end
+ it 'does not schedule an import' do
+ expect_any_instance_of(Project).not_to receive(:import_schedule)
+
+ importer.create_project_if_needed
+ end
+
it 'creates the Git repo in disk' do
- FileUtils.mkdir_p(File.join(base_dir, "#{project_path}.git"))
+ create_bare_repository("#{project_path}.git")
importer.create_project_if_needed
@@ -124,13 +130,14 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
end
it 'creates the Git repo in disk' do
- FileUtils.mkdir_p(File.join(base_dir, "#{project_path}.git"))
+ create_bare_repository("#{project_path}.git")
importer.create_project_if_needed
project = Project.find_by_full_path("#{admin.full_path}/#{project_path}")
expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.git'))
+ expect(File).to exist(File.join(project.repository_storage_path, project.disk_path + '.wiki.git'))
end
it 'moves an existing project to the correct path' do
@@ -158,8 +165,11 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
it_behaves_like 'importing a repository'
it 'creates the Wiki git repo in disk' do
- FileUtils.mkdir_p(File.join(base_dir, "#{project_path}.git"))
- FileUtils.mkdir_p(File.join(base_dir, "#{project_path}.wiki.git"))
+ create_bare_repository("#{project_path}.git")
+ create_bare_repository("#{project_path}.wiki.git")
+
+ expect(Projects::CreateService).to receive(:new).with(admin, hash_including(skip_wiki: true,
+ import_type: 'bare_repository')).and_call_original
importer.create_project_if_needed
@@ -182,4 +192,9 @@ describe Gitlab::BareRepositoryImport::Importer, repository: true do
end
end
end
+
+ def create_bare_repository(project_path)
+ repo_path = File.join(base_dir, project_path)
+ Gitlab::Git::Repository.create(repo_path, bare: true)
+ end
end
diff --git a/spec/lib/gitlab/bare_repository_import/repository_spec.rb b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
index 61b73abcba4..9f42cf1dfca 100644
--- a/spec/lib/gitlab/bare_repository_import/repository_spec.rb
+++ b/spec/lib/gitlab/bare_repository_import/repository_spec.rb
@@ -1,58 +1,122 @@
require 'spec_helper'
describe ::Gitlab::BareRepositoryImport::Repository do
- let(:project_repo_path) { described_class.new('/full/path/', '/full/path/to/repo.git') }
+ context 'legacy storage' do
+ subject { described_class.new('/full/path/', '/full/path/to/repo.git') }
- it 'stores the repo path' do
- expect(project_repo_path.repo_path).to eq('/full/path/to/repo.git')
- end
+ it 'stores the repo path' do
+ expect(subject.repo_path).to eq('/full/path/to/repo.git')
+ end
- it 'stores the group path' do
- expect(project_repo_path.group_path).to eq('to')
- end
+ it 'stores the group path' do
+ expect(subject.group_path).to eq('to')
+ end
- it 'stores the project name' do
- expect(project_repo_path.project_name).to eq('repo')
- end
+ it 'stores the project name' do
+ expect(subject.project_name).to eq('repo')
+ end
- it 'stores the wiki path' do
- expect(project_repo_path.wiki_path).to eq('/full/path/to/repo.wiki.git')
- end
+ it 'stores the wiki path' do
+ expect(subject.wiki_path).to eq('/full/path/to/repo.wiki.git')
+ end
+
+ describe '#processable?' do
+ it 'returns false if it is a wiki' do
+ subject = described_class.new('/full/path/', '/full/path/to/a/b/my.wiki.git')
+
+ expect(subject).not_to be_processable
+ end
+
+ it 'returns true if group path is missing' do
+ subject = described_class.new('/full/path/', '/full/path/repo.git')
- describe '#wiki?' do
- it 'returns true if it is a wiki' do
- wiki_path = described_class.new('/full/path/', '/full/path/to/a/b/my.wiki.git')
+ expect(subject).to be_processable
+ end
- expect(wiki_path.wiki?).to eq(true)
+ it 'returns true when group path and project name are present' do
+ expect(subject).to be_processable
+ end
end
- it 'returns false if it is not a wiki' do
- expect(project_repo_path.wiki?).to eq(false)
+ describe '#project_full_path' do
+ it 'returns the project full path with trailing slash in the root path' do
+ expect(subject.project_full_path).to eq('to/repo')
+ end
+
+ it 'returns the project full path with no trailing slash in the root path' do
+ subject = described_class.new('/full/path', '/full/path/to/repo.git')
+
+ expect(subject.project_full_path).to eq('to/repo')
+ end
end
end
- describe '#hashed?' do
- it 'returns true if it is a hashed folder' do
- path = described_class.new('/full/path/', '/full/path/@hashed/my.repo.git')
+ context 'hashed storage' do
+ let(:gitlab_shell) { Gitlab::Shell.new }
+ let(:repository_storage) { 'default' }
+ let(:root_path) { Gitlab.config.repositories.storages[repository_storage]['path'] }
+ let(:hash) { '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
+ let(:hashed_path) { "@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b" }
+ let(:repo_path) { File.join(root_path, "#{hashed_path}.git") }
+ let(:wiki_path) { File.join(root_path, "#{hashed_path}.wiki.git") }
- expect(path.hashed?).to eq(true)
+ before do
+ gitlab_shell.add_repository(repository_storage, hashed_path)
+ repository = Rugged::Repository.new(repo_path)
+ repository.config['gitlab.fullpath'] = 'to/repo'
end
- it 'returns false if it is not a hashed folder' do
- expect(project_repo_path.hashed?).to eq(false)
+ after do
+ gitlab_shell.remove_repository(root_path, hashed_path)
end
- end
- describe '#project_full_path' do
- it 'returns the project full path' do
- expect(project_repo_path.repo_path).to eq('/full/path/to/repo.git')
- expect(project_repo_path.project_full_path).to eq('to/repo')
+ subject { described_class.new(root_path, repo_path) }
+
+ it 'stores the repo path' do
+ expect(subject.repo_path).to eq(repo_path)
+ end
+
+ it 'stores the wiki path' do
+ expect(subject.wiki_path).to eq(wiki_path)
+ end
+
+ it 'reads the group path from .git/config' do
+ expect(subject.group_path).to eq('to')
+ end
+
+ it 'reads the project name from .git/config' do
+ expect(subject.project_name).to eq('repo')
end
- it 'with no trailing slash in the root path' do
- repo_path = described_class.new('/full/path', '/full/path/to/repo.git')
+ describe '#processable?' do
+ it 'returns false if it is a wiki' do
+ subject = described_class.new(root_path, wiki_path)
+
+ expect(subject).not_to be_processable
+ end
+
+ it 'returns false when group and project name are missing' do
+ repository = Rugged::Repository.new(repo_path)
+ repository.config.delete('gitlab.fullpath')
+
+ expect(subject).not_to be_processable
+ end
+
+ it 'returns true when group path and project name are present' do
+ expect(subject).to be_processable
+ end
+ end
+
+ describe '#project_full_path' do
+ it 'returns the project full path with trailing slash in the root path' do
+ expect(subject.project_full_path).to eq('to/repo')
+ end
+
+ it 'returns the project full path with no trailing slash in the root path' do
+ subject = described_class.new(root_path[0...-1], repo_path)
- expect(repo_path.project_full_path).to eq('to/repo')
+ expect(subject.project_full_path).to eq('to/repo')
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/ansi2html_spec.rb b/spec/lib/gitlab/ci/ansi2html_spec.rb
index 33540eab5d6..05e2d94cbd6 100644
--- a/spec/lib/gitlab/ci/ansi2html_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2html_spec.rb
@@ -120,6 +120,10 @@ describe Gitlab::Ci::Ansi2html do
expect(convert_html("\e[48;5;240mHello")).to eq('<span class="xterm-bg-240">Hello</span>')
end
+ it "can print 256 xterm fg bold colors" do
+ expect(convert_html("\e[38;5;16;1mHello")).to eq('<span class="xterm-fg-16 term-bold">Hello</span>')
+ end
+
it "can print 256 xterm bg colors on normal magenta foreground" do
expect(convert_html("\e[48;5;16;35mHello")).to eq('<span class="term-fg-magenta xterm-bg-16">Hello</span>')
end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 664ba0f7234..7727a1d81b1 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -902,7 +902,7 @@ describe Gitlab::Database::MigrationHelpers do
describe '#check_trigger_permissions!' do
it 'does nothing when the user has the correct permissions' do
expect { model.check_trigger_permissions!('users') }
- .not_to raise_error(RuntimeError)
+ .not_to raise_error
end
it 'raises RuntimeError when the user does not have the correct permissions' do
@@ -1036,4 +1036,93 @@ describe Gitlab::Database::MigrationHelpers do
end
end
end
+
+ describe '#change_column_type_using_background_migration' do
+ let!(:issue) { create(:issue) }
+
+ let(:issue_model) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = 'issues'
+ include EachBatch
+ end
+ end
+
+ it 'changes the type of a column using a background migration' do
+ expect(model)
+ .to receive(:add_column)
+ .with('issues', 'closed_at_for_type_change', :datetime_with_timezone)
+
+ expect(model)
+ .to receive(:install_rename_triggers)
+ .with('issues', :closed_at, 'closed_at_for_type_change')
+
+ expect(BackgroundMigrationWorker)
+ .to receive(:perform_in)
+ .ordered
+ .with(
+ 10.minutes,
+ 'CopyColumn',
+ ['issues', :closed_at, 'closed_at_for_type_change', issue.id, issue.id]
+ )
+
+ expect(BackgroundMigrationWorker)
+ .to receive(:perform_in)
+ .ordered
+ .with(
+ 1.hour + 10.minutes,
+ 'CleanupConcurrentTypeChange',
+ ['issues', :closed_at, 'closed_at_for_type_change']
+ )
+
+ expect(Gitlab::BackgroundMigration)
+ .to receive(:steal)
+ .ordered
+ .with('CopyColumn')
+
+ expect(Gitlab::BackgroundMigration)
+ .to receive(:steal)
+ .ordered
+ .with('CleanupConcurrentTypeChange')
+
+ model.change_column_type_using_background_migration(
+ issue_model.all,
+ :closed_at,
+ :datetime_with_timezone
+ )
+ end
+ end
+
+ describe '#perform_background_migration_inline?' do
+ it 'returns true in a test environment' do
+ allow(Rails.env)
+ .to receive(:test?)
+ .and_return(true)
+
+ expect(model.perform_background_migration_inline?).to eq(true)
+ end
+
+ it 'returns true in a development environment' do
+ allow(Rails.env)
+ .to receive(:test?)
+ .and_return(false)
+
+ allow(Rails.env)
+ .to receive(:development?)
+ .and_return(true)
+
+ expect(model.perform_background_migration_inline?).to eq(true)
+ end
+
+ it 'returns false in a production environment' do
+ allow(Rails.env)
+ .to receive(:test?)
+ .and_return(false)
+
+ allow(Rails.env)
+ .to receive(:development?)
+ .and_return(false)
+
+ expect(model.perform_background_migration_inline?).to eq(false)
+ end
+ end
end
diff --git a/spec/lib/gitlab/encoding_helper_spec.rb b/spec/lib/gitlab/encoding_helper_spec.rb
index f6e5c55240f..87ec2698fc1 100644
--- a/spec/lib/gitlab/encoding_helper_spec.rb
+++ b/spec/lib/gitlab/encoding_helper_spec.rb
@@ -145,4 +145,18 @@ describe Gitlab::EncodingHelper do
end
end
end
+
+ describe 'encode_binary' do
+ [
+ [nil, ""],
+ ["", ""],
+ [" ", " "],
+ %w(a1 a1),
+ ["编码", "\xE7\xBC\x96\xE7\xA0\x81".b]
+ ].each do |input, result|
+ it "encodes #{input.inspect} to #{result.inspect}" do
+ expect(ext_class.encode_binary(input)).to eq(result)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb
index 5ed639543e0..6d35734d306 100644
--- a/spec/lib/gitlab/git/commit_spec.rb
+++ b/spec/lib/gitlab/git/commit_spec.rb
@@ -428,6 +428,11 @@ describe Gitlab::Git::Commit, seed_helper: true do
subject { super().deletions }
it { is_expected.to eq(6) }
end
+
+ describe '#total' do
+ subject { super().total }
+ it { is_expected.to eq(17) }
+ end
end
describe '#stats with gitaly on' do
diff --git a/spec/lib/gitlab/git/gitlab_projects_spec.rb b/spec/lib/gitlab/git/gitlab_projects_spec.rb
index 24da9589458..a798b188a0d 100644
--- a/spec/lib/gitlab/git/gitlab_projects_spec.rb
+++ b/spec/lib/gitlab/git/gitlab_projects_spec.rb
@@ -243,7 +243,6 @@ describe Gitlab::Git::GitlabProjects do
let(:dest_repos_path) { tmp_repos_path }
let(:dest_repo_name) { File.join('@hashed', 'aa', 'bb', 'xyz.git') }
let(:dest_repo) { File.join(dest_repos_path, dest_repo_name) }
- let(:dest_namespace) { File.dirname(dest_repo) }
subject { gl_projects.fork_repository(dest_repos_path, dest_repo_name) }
@@ -255,37 +254,64 @@ describe Gitlab::Git::GitlabProjects do
FileUtils.rm_rf(dest_repos_path)
end
- it 'forks the repository' do
- message = "Forking repository from <#{tmp_repo_path}> to <#{dest_repo}>."
- expect(logger).to receive(:info).with(message)
+ shared_examples 'forking a repository' do
+ it 'forks the repository' do
+ is_expected.to be_truthy
- is_expected.to be_truthy
+ expect(File.exist?(dest_repo)).to be_truthy
+ expect(File.exist?(File.join(dest_repo, 'hooks', 'pre-receive'))).to be_truthy
+ expect(File.exist?(File.join(dest_repo, 'hooks', 'post-receive'))).to be_truthy
+ end
+
+ it 'does not fork if a project of the same name already exists' do
+ # create a fake project at the intended destination
+ FileUtils.mkdir_p(dest_repo)
- expect(File.exist?(dest_repo)).to be_truthy
- expect(File.exist?(File.join(dest_repo, 'hooks', 'pre-receive'))).to be_truthy
- expect(File.exist?(File.join(dest_repo, 'hooks', 'post-receive'))).to be_truthy
+ is_expected.to be_falsy
+ end
end
- it 'does not fork if a project of the same name already exists' do
- # create a fake project at the intended destination
- FileUtils.mkdir_p(dest_repo)
+ context 'when Gitaly fork_repository feature is enabled' do
+ it_behaves_like 'forking a repository'
+ end
- # trying to fork again should fail as the repo already exists
- message = "fork-repository failed: destination repository <#{dest_repo}> already exists."
- expect(logger).to receive(:error).with(message)
+ context 'when Gitaly fork_repository feature is disabled', :disable_gitaly do
+ it_behaves_like 'forking a repository'
- is_expected.to be_falsy
- end
+ # We seem to be stuck with having only one working Gitaly storage in tests; changing
+ # that is not very straightforward, so I'm leaving this test here for now until
+ # https://gitlab.com/gitlab-org/gitlab-ce/issues/41393 is fixed.
+ context 'different storages' do
+ let(:dest_repos_path) { File.join(File.dirname(tmp_repos_path), 'alternative') }
- context 'different storages' do
- let(:dest_repos_path) { File.join(File.dirname(tmp_repos_path), 'alternative') }
+ it 'forks the repo' do
+ is_expected.to be_truthy
- it 'forks the repo' do
- is_expected.to be_truthy
+ expect(File.exist?(dest_repo)).to be_truthy
+ expect(File.exist?(File.join(dest_repo, 'hooks', 'pre-receive'))).to be_truthy
+ expect(File.exist?(File.join(dest_repo, 'hooks', 'post-receive'))).to be_truthy
+ end
+ end
- expect(File.exist?(dest_repo)).to be_truthy
- expect(File.exist?(File.join(dest_repo, 'hooks', 'pre-receive'))).to be_truthy
- expect(File.exist?(File.join(dest_repo, 'hooks', 'post-receive'))).to be_truthy
+ describe 'log messages' do
+ describe 'successful fork' do
+ it do
+ message = "Forking repository from <#{tmp_repo_path}> to <#{dest_repo}>."
+ expect(logger).to receive(:info).with(message)
+
+ subject
+ end
+ end
+
+ describe 'failed fork due to existing destination' do
+ it do
+ FileUtils.mkdir_p(dest_repo)
+ message = "fork-repository failed: destination repository <#{dest_repo}> already exists."
+ expect(logger).to receive(:error).with(message)
+
+ subject
+ end
+ end
end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
new file mode 100644
index 00000000000..b9641de7eda
--- /dev/null
+++ b/spec/lib/gitlab/gitaly_client/conflicts_service_spec.rb
@@ -0,0 +1,90 @@
+require 'spec_helper'
+
+describe Gitlab::GitalyClient::ConflictsService do
+ let(:project) { create(:project, :repository) }
+ let(:target_project) { create(:project, :repository) }
+ let(:source_repository) { project.repository.raw }
+ let(:target_repository) { target_project.repository.raw }
+ let(:target_gitaly_repository) { target_repository.gitaly_repository }
+ let(:our_commit_oid) { 'f00' }
+ let(:their_commit_oid) { 'f44' }
+ let(:client) do
+ described_class.new(target_repository, our_commit_oid, their_commit_oid)
+ end
+
+ describe '#list_conflict_files' do
+ let(:request) do
+ Gitaly::ListConflictFilesRequest.new(
+ repository: target_gitaly_repository, our_commit_oid: our_commit_oid,
+ their_commit_oid: their_commit_oid
+ )
+ end
+ let(:our_path) { 'our/path' }
+ let(:their_path) { 'their/path' }
+ let(:our_mode) { 0744 }
+ let(:header) do
+ double(repository: target_gitaly_repository, commit_oid: our_commit_oid,
+ our_path: our_path, our_mode: 0744, their_path: their_path)
+ end
+ let(:response) do
+ [
+ double(files: [double(header: header), double(content: 'foo', header: nil)]),
+ double(files: [double(content: 'bar', header: nil)])
+ ]
+ end
+ let(:file) { subject[0] }
+
+ subject { client.list_conflict_files }
+
+ it 'sends an RPC request' do
+ expect_any_instance_of(Gitaly::ConflictsService::Stub).to receive(:list_conflict_files)
+ .with(request, kind_of(Hash)).and_return([])
+
+ subject
+ end
+
+ it 'forms a Gitlab::Git::ConflictFile collection from the response' do
+ allow_any_instance_of(Gitaly::ConflictsService::Stub).to receive(:list_conflict_files)
+ .with(request, kind_of(Hash)).and_return(response)
+
+ expect(subject.size).to be(1)
+ expect(file.content).to eq('foobar')
+ expect(file.their_path).to eq(their_path)
+ expect(file.our_path).to eq(our_path)
+ expect(file.our_mode).to be(our_mode)
+ expect(file.repository).to eq(target_repository)
+ expect(file.commit_oid).to eq(our_commit_oid)
+ end
+ end
+
+ describe '#resolve_conflicts' do
+ let(:user) { create(:user) }
+ let(:files) do
+ [{ old_path: 'some/path', new_path: 'some/path', content: '' }]
+ end
+ let(:source_branch) { 'master' }
+ let(:target_branch) { 'feature' }
+ let(:commit_message) { 'Solving conflicts' }
+ let(:resolution) do
+ Gitlab::Git::Conflict::Resolution.new(user, files, commit_message)
+ end
+
+ subject do
+ client.resolve_conflicts(source_repository, resolution, source_branch, target_branch)
+ end
+
+ it 'sends an RPC request' do
+ expect_any_instance_of(Gitaly::ConflictsService::Stub).to receive(:resolve_conflicts)
+ .with(kind_of(Enumerator), kind_of(Hash)).and_return(double(resolution_error: ""))
+
+ subject
+ end
+
+ it 'raises a relevant exception if resolution_error is present' do
+ expect_any_instance_of(Gitaly::ConflictsService::Stub).to receive(:resolve_conflicts)
+ .with(kind_of(Enumerator), kind_of(Hash)).and_return(double(resolution_error: "something happened"))
+
+ expect { subject }.to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index a871ed0df0e..309b7338ef0 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -38,20 +38,6 @@ describe Gitlab::GitalyClient, skip_gitaly_mock: true do
end
end
- describe 'encode' do
- [
- [nil, ""],
- ["", ""],
- [" ", " "],
- %w(a1 a1),
- ["编码", "\xE7\xBC\x96\xE7\xA0\x81".b]
- ].each do |input, result|
- it "encodes #{input.inspect} to #{result.inspect}" do
- expect(described_class.encode(input)).to eq result
- end
- end
- end
-
describe 'allow_n_plus_1_calls' do
context 'when RequestStore is enabled', :request_store do
it 'returns the result of the allow_n_plus_1_calls block' do
diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb
index 3dbe510b7ba..b5a9ac570e6 100644
--- a/spec/lib/gitlab/search_results_spec.rb
+++ b/spec/lib/gitlab/search_results_spec.rb
@@ -52,15 +52,36 @@ describe Gitlab::SearchResults do
expect(results.objects('merge_requests')).to include merge_request_2
end
- it 'includes project filter by default' do
- expect(results).to receive(:project_ids_relation).and_call_original
- results.objects('merge_requests')
+ describe '#merge_requests' do
+ it 'includes project filter by default' do
+ expect(results).to receive(:project_ids_relation).and_call_original
+
+ results.objects('merge_requests')
+ end
+
+ it 'skips project filter if default project context is used' do
+ allow(results).to receive(:default_project_filter).and_return(true)
+
+ expect(results).not_to receive(:project_ids_relation)
+
+ results.objects('merge_requests')
+ end
end
- it 'it skips project filter if default is used' do
- allow(results).to receive(:default_project_filter).and_return(true)
- expect(results).not_to receive(:project_ids_relation)
- results.objects('merge_requests')
+ describe '#issues' do
+ it 'includes project filter by default' do
+ expect(results).to receive(:project_ids_relation).and_call_original
+
+ results.objects('issues')
+ end
+
+ it 'skips project filter if default project context is used' do
+ allow(results).to receive(:default_project_filter).and_return(true)
+
+ expect(results).not_to receive(:project_ids_relation)
+
+ results.objects('issues')
+ end
end
end
diff --git a/spec/lib/gitlab/visibility_level_spec.rb b/spec/lib/gitlab/visibility_level_spec.rb
index 48a67773de9..d85dac630b4 100644
--- a/spec/lib/gitlab/visibility_level_spec.rb
+++ b/spec/lib/gitlab/visibility_level_spec.rb
@@ -49,4 +49,31 @@ describe Gitlab::VisibilityLevel do
.to eq([Gitlab::VisibilityLevel::PUBLIC])
end
end
+
+ describe '.allowed_levels' do
+ it 'only includes the levels that are not restricted' do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
+
+ expect(described_class.allowed_levels)
+ .to contain_exactly(described_class::PRIVATE, described_class::PUBLIC)
+ end
+ end
+
+ describe '.closest_allowed_level' do
+ it 'picks INTERNAL instead of PUBLIC if public is restricted' do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
+
+ expect(described_class.closest_allowed_level(described_class::PUBLIC))
+ .to eq(described_class::INTERNAL)
+ end
+
+ it 'picks PRIVATE if nothing is available' do
+ stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC,
+ Gitlab::VisibilityLevel::INTERNAL,
+ Gitlab::VisibilityLevel::PRIVATE])
+
+ expect(described_class.closest_allowed_level(described_class::PUBLIC))
+ .to eq(described_class::PRIVATE)
+ end
+ end
end
diff --git a/spec/migrations/clean_up_for_members_spec.rb b/spec/migrations/clean_up_for_members_spec.rb
new file mode 100644
index 00000000000..0258860d169
--- /dev/null
+++ b/spec/migrations/clean_up_for_members_spec.rb
@@ -0,0 +1,78 @@
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20171216111734_clean_up_for_members.rb')
+
+describe CleanUpForMembers, :migration do
+ let(:migration) { described_class.new }
+ let!(:group_member) { create_group_member }
+ let!(:unbinded_group_member) { create_group_member }
+ let!(:invited_group_member) { create_group_member(true) }
+ let!(:not_valid_group_member) { create_group_member }
+ let!(:project_member) { create_project_member }
+ let!(:invited_project_member) { create_project_member(true) }
+ let!(:unbinded_project_member) { create_project_member }
+ let!(:not_valid_project_member) { create_project_member }
+
+ it 'removes members without proper user_id' do
+ unbinded_group_member.update_column(:user_id, nil)
+ not_valid_group_member.update_column(:user_id, 9999)
+ unbinded_project_member.update_column(:user_id, nil)
+ not_valid_project_member.update_column(:user_id, 9999)
+
+ migrate!
+
+ expect(Member.all).not_to include(unbinded_group_member, not_valid_group_member, unbinded_project_member, not_valid_project_member)
+ expect(Member.all).to include(group_member, invited_group_member, project_member, invited_project_member)
+ end
+
+ def create_group_member(invited = false)
+ fill_member(GroupMember.new(group: create_group), invited)
+ end
+
+ def create_project_member(invited = false)
+ fill_member(ProjectMember.new(project: create_project), invited)
+ end
+
+ def fill_member(member_object, invited)
+ member_object.tap do |m|
+ m.access_level = 40
+ m.notification_level = 3
+
+ if invited
+ m.user_id = nil
+ m.invite_token = 'xxx'
+ m.invite_email = 'email@email.com'
+ else
+ m.user_id = create_user.id
+ end
+
+ m.save
+ end
+
+ member_object
+ end
+
+ def create_group
+ name = FFaker::Lorem.characters(10)
+
+ Group.create(name: name, path: name.downcase.gsub(/\s/, '_'))
+ end
+
+ def create_project
+ name = FFaker::Lorem.characters(10)
+ creator = create_user
+
+ Project.create(name: name,
+ path: name.downcase.gsub(/\s/, '_'),
+ namespace: creator.namespace,
+ creator: creator)
+ end
+
+ def create_user
+ User.create(email: FFaker::Internet.email,
+ password: '12345678',
+ name: FFaker::Name.name,
+ username: FFaker::Internet.user_name,
+ confirmed_at: Time.now,
+ confirmation_token: nil)
+ end
+end
diff --git a/spec/migrations/issues_moved_to_id_foreign_key_spec.rb b/spec/migrations/issues_moved_to_id_foreign_key_spec.rb
new file mode 100644
index 00000000000..d2eef81f396
--- /dev/null
+++ b/spec/migrations/issues_moved_to_id_foreign_key_spec.rb
@@ -0,0 +1,25 @@
+require 'spec_helper'
+require Rails.root.join('db', 'migrate', '20171106151218_issues_moved_to_id_foreign_key.rb')
+
+# The schema version has to be far enough in advance to have the
+# only_mirror_protected_branches column in the projects table to create a
+# project via FactoryBot.
+describe IssuesMovedToIdForeignKey, :migration, schema: 20171114150259 do
+ let!(:issue_first) { create(:issue, moved_to_id: issue_second.id) }
+ let!(:issue_second) { create(:issue, moved_to_id: issue_third.id) }
+ let!(:issue_third) { create(:issue) }
+
+ subject { described_class.new }
+
+ it 'removes the orphaned moved_to_id' do
+ subject.down
+
+ issue_third.update_attributes(moved_to_id: 100000)
+
+ subject.up
+
+ expect(issue_first.reload.moved_to_id).to eq(issue_second.id)
+ expect(issue_second.reload.moved_to_id).to eq(issue_third.id)
+ expect(issue_third.reload.moved_to_id).to be_nil
+ end
+end
diff --git a/spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb b/spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb
new file mode 100644
index 00000000000..97e089c5cb8
--- /dev/null
+++ b/spec/migrations/schedule_populate_merge_request_metrics_with_events_data_spec.rb
@@ -0,0 +1,24 @@
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20171128214150_schedule_populate_merge_request_metrics_with_events_data.rb')
+
+describe SchedulePopulateMergeRequestMetricsWithEventsData, :migration, :sidekiq do
+ let!(:mrs) { create_list(:merge_request, 3) }
+
+ it 'correctly schedules background migrations' do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_migration(10.minutes, mrs.first.id, mrs.second.id)
+
+ expect(described_class::MIGRATION)
+ .to be_scheduled_migration(20.minutes, mrs.third.id, mrs.third.id)
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+end
diff --git a/spec/models/diff_discussion_spec.rb b/spec/models/diff_discussion_spec.rb
index fa02434b0fd..50b19000799 100644
--- a/spec/models/diff_discussion_spec.rb
+++ b/spec/models/diff_discussion_spec.rb
@@ -47,8 +47,20 @@ describe DiffDiscussion do
diff_note.save!
end
- it 'returns the diff ID for the version to show' do
- expect(subject.merge_request_version_params).to eq(diff_id: merge_request_diff1.id)
+ context 'when commit_id is not present' do
+ it 'returns the diff ID for the version to show' do
+ expect(subject.merge_request_version_params).to eq(diff_id: merge_request_diff1.id)
+ end
+ end
+
+ context 'when commit_id is present' do
+ before do
+ diff_note.update_attribute(:commit_id, 'commit_123')
+ end
+
+ it 'includes the commit_id in the result' do
+ expect(subject.merge_request_version_params).to eq(diff_id: merge_request_diff1.id, commit_id: 'commit_123')
+ end
end
end
@@ -70,8 +82,20 @@ describe DiffDiscussion do
diff_note.save!
end
- it 'returns the diff ID and start sha of the versions to compare' do
- expect(subject.merge_request_version_params).to eq(diff_id: merge_request_diff3.id, start_sha: merge_request_diff1.head_commit_sha)
+ context 'when commit_id is not present' do
+ it 'returns the diff ID and start sha of the versions to compare' do
+ expect(subject.merge_request_version_params).to eq(diff_id: merge_request_diff3.id, start_sha: merge_request_diff1.head_commit_sha)
+ end
+ end
+
+ context 'when commit_id is present' do
+ before do
+ diff_note.update_attribute(:commit_id, 'commit_123')
+ end
+
+ it 'includes the commit_id in the result' do
+ expect(subject.merge_request_version_params).to eq(diff_id: merge_request_diff3.id, start_sha: merge_request_diff1.head_commit_sha, commit_id: 'commit_123')
+ end
end
end
@@ -83,8 +107,20 @@ describe DiffDiscussion do
diff_note.save!
end
- it 'returns nil' do
- expect(subject.merge_request_version_params).to be_nil
+ context 'when commit_id is not present' do
+ it 'returns nil' do
+ expect(subject.merge_request_version_params).to eq(nil)
+ end
+ end
+
+ context 'when commit_id is present' do
+ before do
+ diff_note.update_attribute(:commit_id, 'commit_123')
+ end
+
+ it 'returns the commit_id' do
+ expect(subject.merge_request_version_params).to eq(commit_id: 'commit_123')
+ end
end
end
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index c9deeb45c1a..5ced000cdb6 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -23,6 +23,32 @@ describe Issue do
it { is_expected.to have_db_index(:deleted_at) }
end
+ describe 'callbacks' do
+ describe '#ensure_metrics' do
+ it 'creates metrics after saving' do
+ issue = create(:issue)
+
+ expect(issue.metrics).to be_persisted
+ expect(Issue::Metrics.count).to eq(1)
+ end
+
+ it 'does not create duplicate metrics for an issue' do
+ issue = create(:issue)
+
+ issue.close!
+
+ expect(issue.metrics).to be_persisted
+ expect(Issue::Metrics.count).to eq(1)
+ end
+
+ it 'records current metrics' do
+ expect_any_instance_of(Issue::Metrics).to receive(:record!)
+
+ create(:issue)
+ end
+ end
+ end
+
describe '#order_by_position_and_priority' do
let(:project) { create :project }
let(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
diff --git a/spec/models/merge_request/metrics_spec.rb b/spec/models/merge_request/metrics_spec.rb
index 9353d5c3c8a..02ff7839739 100644
--- a/spec/models/merge_request/metrics_spec.rb
+++ b/spec/models/merge_request/metrics_spec.rb
@@ -1,16 +1,11 @@
require 'spec_helper'
describe MergeRequest::Metrics do
- subject { create(:merge_request) }
+ subject { described_class.new }
- describe "when recording the default set of metrics on merge request save" do
- it "records the merge time" do
- time = Time.now
- Timecop.freeze(time) { subject.mark_as_merged }
- metrics = subject.metrics
-
- expect(metrics).to be_present
- expect(metrics.merged_at).to be_like_time(time)
- end
+ describe 'associations' do
+ it { is_expected.to belong_to(:merge_request) }
+ it { is_expected.to belong_to(:latest_closed_by).class_name('User') }
+ it { is_expected.to belong_to(:merged_by).class_name('User') }
end
end
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index df94617a19e..d8ebd46faab 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -65,6 +65,25 @@ describe MergeRequest do
end
end
+ describe 'callbacks' do
+ describe '#ensure_merge_request_metrics' do
+ it 'creates metrics after saving' do
+ merge_request = create(:merge_request)
+
+ expect(merge_request.metrics).to be_persisted
+ expect(MergeRequest::Metrics.count).to eq(1)
+ end
+
+ it 'does not duplicate metrics for a merge request' do
+ merge_request = create(:merge_request)
+
+ merge_request.mark_as_merged!
+
+ expect(MergeRequest::Metrics.count).to eq(1)
+ end
+ end
+ end
+
describe 'respond to' do
it { is_expected.to respond_to(:unchecked?) }
it { is_expected.to respond_to(:can_be_merged?) }
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index b7c6286fd83..0678cae9b93 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -203,7 +203,7 @@ describe Namespace do
context 'with subgroups' do
let(:parent) { create(:group, name: 'parent', path: 'parent') }
let(:child) { create(:group, name: 'child', path: 'child', parent: parent) }
- let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child) }
+ let!(:project) { create(:project_empty_repo, path: 'the-project', namespace: child, skip_disk_validation: true) }
let(:uploads_dir) { File.join(CarrierWave.root, FileUploader.base_dir) }
let(:pages_dir) { File.join(TestEnv.pages_path) }
@@ -240,6 +240,20 @@ describe Namespace do
end
end
end
+
+ it 'updates project full path in .git/config for each project inside namespace' do
+ parent = create(:group, name: 'mygroup', path: 'mygroup')
+ subgroup = create(:group, name: 'mysubgroup', path: 'mysubgroup', parent: parent)
+ project_in_parent_group = create(:project, :repository, namespace: parent, name: 'foo1')
+ hashed_project_in_subgroup = create(:project, :repository, :hashed, namespace: subgroup, name: 'foo2')
+ legacy_project_in_subgroup = create(:project, :repository, namespace: subgroup, name: 'foo3')
+
+ parent.update(path: 'mygroup_new')
+
+ expect(project_in_parent_group.repo.config['gitlab.fullpath']).to eq "mygroup_new/#{project_in_parent_group.path}"
+ expect(hashed_project_in_subgroup.repo.config['gitlab.fullpath']).to eq "mygroup_new/mysubgroup/#{hashed_project_in_subgroup.path}"
+ expect(legacy_project_in_subgroup.repo.config['gitlab.fullpath']).to eq "mygroup_new/mysubgroup/#{legacy_project_in_subgroup.path}"
+ end
end
describe '#rm_dir', 'callback' do
diff --git a/spec/models/project_services/kubernetes_service_spec.rb b/spec/models/project_services/kubernetes_service_spec.rb
index f037ee77a94..6980ba335b8 100644
--- a/spec/models/project_services/kubernetes_service_spec.rb
+++ b/spec/models/project_services/kubernetes_service_spec.rb
@@ -52,12 +52,75 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do
context 'when service is inactive' do
before do
+ subject.project = project
subject.active = false
end
it { is_expected.not_to validate_presence_of(:api_url) }
it { is_expected.not_to validate_presence_of(:token) }
end
+
+ context 'with a deprecated service' do
+ let(:kubernetes_service) { create(:kubernetes_service) }
+
+ before do
+ kubernetes_service.update_attribute(:active, false)
+ kubernetes_service.properties[:namespace] = "foo"
+ end
+
+ it 'should not update attributes' do
+ expect(kubernetes_service.save).to be_falsy
+ end
+
+ it 'should include an error with a deprecation message' do
+ kubernetes_service.valid?
+ expect(kubernetes_service.errors[:base].first).to match(/Kubernetes service integration has been deprecated/)
+ end
+ end
+
+ context 'with a non-deprecated service' do
+ let(:kubernetes_service) { create(:kubernetes_service) }
+
+ it 'should update attributes' do
+ kubernetes_service.properties[:namespace] = 'foo'
+ expect(kubernetes_service.save).to be_truthy
+ end
+ end
+
+ context 'with an active and deprecated service' do
+ let(:kubernetes_service) { create(:kubernetes_service) }
+
+ before do
+ kubernetes_service.active = false
+ kubernetes_service.properties[:namespace] = 'foo'
+ kubernetes_service.save
+ end
+
+ it 'should deactivate the service' do
+ expect(kubernetes_service.active?).to be_falsy
+ end
+
+ it 'should not include a deprecation message as an error' do
+ expect(kubernetes_service.errors.messages.count).to eq(0)
+ end
+
+ it 'should update attributes' do
+ expect(kubernetes_service.properties[:namespace]).to eq("foo")
+ end
+ end
+
+ context 'with a template service' do
+ let(:kubernetes_service) { create(:kubernetes_service, template: true, active: false) }
+
+ before do
+ kubernetes_service.properties[:namespace] = 'foo'
+ end
+
+ it 'should update attributes' do
+ expect(kubernetes_service.save).to be_truthy
+ expect(kubernetes_service.properties[:namespace]).to eq('foo')
+ end
+ end
end
describe '#initialize_properties' do
@@ -318,4 +381,42 @@ describe KubernetesService, :use_clean_rails_memory_store_caching do
it { is_expected.to eq(pods: []) }
end
end
+
+ describe "#deprecated?" do
+ let(:kubernetes_service) { create(:kubernetes_service) }
+
+ context 'with an active kubernetes service' do
+ it 'should return false' do
+ expect(kubernetes_service.deprecated?).to be_falsy
+ end
+ end
+
+ context 'with an inactive kubernetes service' do
+ it 'should return true' do
+ kubernetes_service.update_attribute(:active, false)
+ expect(kubernetes_service.deprecated?).to be_truthy
+ end
+ end
+ end
+
+ describe "#deprecation_message" do
+ let(:kubernetes_service) { create(:kubernetes_service) }
+
+ it 'should indicate the service is deprecated' do
+ expect(kubernetes_service.deprecation_message).to match(/Kubernetes service integration has been deprecated/)
+ end
+
+ context 'if the service is active' do
+ it 'should return a message' do
+ expect(kubernetes_service.deprecation_message).to match(/Your cluster information on this page is still editable/)
+ end
+ end
+
+ context 'if the service is not active' do
+ it 'should return a message' do
+ kubernetes_service.update_attribute(:active, false)
+ expect(kubernetes_service.deprecation_message).to match(/Fields on this page are now uneditable/)
+ end
+ end
+ end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 7338e341359..cea22bbd184 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -2626,6 +2626,14 @@ describe Project do
project.rename_repo
end
end
+
+ it 'updates project full path in .git/config' do
+ allow(project_storage).to receive(:rename_repo).and_return(true)
+
+ project.rename_repo
+
+ expect(project.repo.config['gitlab.fullpath']).to eq(project.full_path)
+ end
end
describe '#pages_path' do
@@ -2668,14 +2676,12 @@ describe Project do
end
context 'hashed storage' do
- let(:project) { create(:project, :repository) }
+ let(:project) { create(:project, :repository, skip_disk_validation: true) }
let(:gitlab_shell) { Gitlab::Shell.new }
- let(:hash) { '6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b' }
+ let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
before do
stub_application_setting(hashed_storage_enabled: true)
- allow(Digest::SHA2).to receive(:hexdigest) { hash }
- allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
end
describe '#legacy_storage?' do
@@ -2698,13 +2704,13 @@ describe Project do
describe '#base_dir' do
it 'returns base_dir based on hash of project id' do
- expect(project.base_dir).to eq('@hashed/6b/86')
+ expect(project.base_dir).to eq("@hashed/#{hash[0..1]}/#{hash[2..3]}")
end
end
describe '#disk_path' do
it 'returns disk_path based on hash of project id' do
- hashed_path = '@hashed/6b/86/6b86b273ff34fce19d6b804eff5a3f5747ada4eaa22f1d49c01e52ddb7875b4b'
+ hashed_path = "@hashed/#{hash[0..1]}/#{hash[2..3]}/#{hash}"
expect(project.disk_path).to eq(hashed_path)
end
@@ -2712,7 +2718,9 @@ describe Project do
describe '#ensure_storage_path_exists' do
it 'delegates to gitlab_shell to ensure namespace is created' do
- expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, '@hashed/6b/86')
+ allow(project).to receive(:gitlab_shell).and_return(gitlab_shell)
+
+ expect(gitlab_shell).to receive(:add_namespace).with(project.repository_storage_path, "@hashed/#{hash[0..1]}/#{hash[2..3]}")
project.ensure_storage_path_exists
end
@@ -2772,7 +2780,7 @@ describe Project do
end
context 'when not rolled out' do
- let(:project) { create(:project, :repository, storage_version: 1) }
+ let(:project) { create(:project, :repository, storage_version: 1, skip_disk_validation: true) }
it 'moves pages folder to new location' do
expect_any_instance_of(Gitlab::UploadsTransfer).to receive(:rename_project)
@@ -2781,6 +2789,12 @@ describe Project do
end
end
end
+
+ it 'updates project full path in .git/config' do
+ project.rename_repo
+
+ expect(project.repo.config['gitlab.fullpath']).to eq(project.full_path)
+ end
end
describe '#pages_path' do
@@ -3141,4 +3155,26 @@ describe Project do
it { is_expected.to eq(platform_kubernetes) }
end
end
+
+ describe '#write_repository_config' do
+ set(:project) { create(:project, :repository) }
+
+ it 'writes full path in .git/config when key is missing' do
+ project.write_repository_config
+
+ expect(project.repo.config['gitlab.fullpath']).to eq project.full_path
+ end
+
+ it 'updates full path in .git/config when key is present' do
+ project.write_repository_config(gl_full_path: 'old/path')
+
+ expect { project.write_repository_config }.to change { project.repo.config['gitlab.fullpath'] }.from('old/path').to(project.full_path)
+ end
+
+ it 'does not raise an error with an empty repository' do
+ project = create(:project_empty_repo)
+
+ expect { project.write_repository_config }.not_to raise_error
+ end
+ end
end
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index 0f2f906c667..540615de117 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -254,4 +254,22 @@ describe Service do
end
end
end
+
+ describe "#deprecated?" do
+ let(:project) { create(:project, :repository) }
+
+ it 'should return false by default' do
+ service = create(:service, project: project)
+ expect(service.deprecated?).to be_falsy
+ end
+ end
+
+ describe "#deprecation_message" do
+ let(:project) { create(:project, :repository) }
+
+ it 'should be empty by default' do
+ service = create(:service, project: project)
+ expect(service.deprecation_message).to be_nil
+ end
+ end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 2557ce71f2b..047a46886c7 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -22,7 +22,9 @@ describe User do
describe 'associations' do
it { is_expected.to have_one(:namespace) }
it { is_expected.to have_many(:snippets).dependent(:destroy) }
- it { is_expected.to have_many(:project_members).dependent(:destroy) }
+ it { is_expected.to have_many(:members) }
+ it { is_expected.to have_many(:project_members) }
+ it { is_expected.to have_many(:group_members) }
it { is_expected.to have_many(:groups) }
it { is_expected.to have_many(:keys).dependent(:destroy) }
it { is_expected.to have_many(:deploy_keys).dependent(:destroy) }
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index be8d9c19125..981c9c27325 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -464,7 +464,7 @@ describe API::Notes do
describe "POST /projects/:id/noteable/:noteable_id/notes to test observer on create" do
it "creates an activity event when an issue note is created" do
- expect(Event).to receive(:create)
+ expect(Event).to receive(:create!)
post api("/projects/#{project.id}/issues/#{issue.iid}/notes", user), body: 'hi!'
end
diff --git a/spec/requests/api/project_milestones_spec.rb b/spec/requests/api/project_milestones_spec.rb
index 6fe8ab5a3f6..08ea7314bb3 100644
--- a/spec/requests/api/project_milestones_spec.rb
+++ b/spec/requests/api/project_milestones_spec.rb
@@ -16,7 +16,7 @@ describe API::ProjectMilestones do
describe 'PUT /projects/:id/milestones/:milestone_id to test observer on close' do
it 'creates an activity event when an milestone is closed' do
- expect(Event).to receive(:create)
+ expect(Event).to receive(:create!)
put api("/projects/#{project.id}/milestones/#{milestone.id}", user),
state_event: 'close'
diff --git a/spec/requests/api/services_spec.rb b/spec/requests/api/services_spec.rb
index ceafa0e2058..26d56c04862 100644
--- a/spec/requests/api/services_spec.rb
+++ b/spec/requests/api/services_spec.rb
@@ -53,6 +53,10 @@ describe API::Services do
describe "DELETE /projects/:id/services/#{service.dasherize}" do
include_context service
+ before do
+ initialize_service(service)
+ end
+
it "deletes #{service}" do
delete api("/projects/#{project.id}/services/#{dashed_service}", user)
@@ -67,9 +71,7 @@ describe API::Services do
# inject some properties into the service
before do
- service_object = project.find_or_initialize_service(service)
- service_object.properties = service_attrs
- service_object.save
+ initialize_service(service)
end
it 'returns authentication error when unauthenticated' do
diff --git a/spec/requests/api/v3/milestones_spec.rb b/spec/requests/api/v3/milestones_spec.rb
index 9ee71ea9c5d..6021600e09c 100644
--- a/spec/requests/api/v3/milestones_spec.rb
+++ b/spec/requests/api/v3/milestones_spec.rb
@@ -161,7 +161,7 @@ describe API::V3::Milestones do
describe 'PUT /projects/:id/milestones/:milestone_id to test observer on close' do
it 'creates an activity event when an milestone is closed' do
- expect(Event).to receive(:create)
+ expect(Event).to receive(:create!)
put v3_api("/projects/#{project.id}/milestones/#{milestone.id}", user),
state_event: 'close'
diff --git a/spec/requests/api/v3/notes_spec.rb b/spec/requests/api/v3/notes_spec.rb
index 6428d9daaba..5532795ab02 100644
--- a/spec/requests/api/v3/notes_spec.rb
+++ b/spec/requests/api/v3/notes_spec.rb
@@ -302,7 +302,7 @@ describe API::V3::Notes do
describe "POST /projects/:id/noteable/:noteable_id/notes to test observer on create" do
it "creates an activity event when an issue note is created" do
- expect(Event).to receive(:create)
+ expect(Event).to receive(:create!)
post v3_api("/projects/#{project.id}/issues/#{issue.id}/notes", user), body: 'hi!'
end
diff --git a/spec/requests/api/v3/services_spec.rb b/spec/requests/api/v3/services_spec.rb
index 8f212ab6be6..c69a7d58ca6 100644
--- a/spec/requests/api/v3/services_spec.rb
+++ b/spec/requests/api/v3/services_spec.rb
@@ -10,6 +10,10 @@ describe API::V3::Services do
describe "DELETE /projects/:id/services/#{service.dasherize}" do
include_context service
+ before do
+ initialize_service(service)
+ end
+
it "deletes #{service}" do
delete v3_api("/projects/#{project.id}/services/#{dashed_service}", user)
diff --git a/spec/requests/api/wikis_spec.rb b/spec/requests/api/wikis_spec.rb
index 65bd001e491..fb0806ff9f1 100644
--- a/spec/requests/api/wikis_spec.rb
+++ b/spec/requests/api/wikis_spec.rb
@@ -12,6 +12,8 @@ require 'spec_helper'
describe API::Wikis do
let(:user) { create(:user) }
+ let(:group) { create(:group).tap { |g| g.add_owner(user) } }
+ let(:project_wiki) { create(:project_wiki, project: project, user: user) }
let(:payload) { { content: 'content', format: 'rdoc', title: 'title' } }
let(:expected_keys_with_content) { %w(content format slug title) }
let(:expected_keys_without_content) { %w(format slug title) }
@@ -19,8 +21,8 @@ describe API::Wikis do
shared_examples_for 'returns list of wiki pages' do
context 'when wiki has pages' do
let!(:pages) do
- [create(:wiki_page, wiki: project.wiki, attrs: { title: 'page1', content: 'content of page1' }),
- create(:wiki_page, wiki: project.wiki, attrs: { title: 'page2', content: 'content of page2' })]
+ [create(:wiki_page, wiki: project_wiki, attrs: { title: 'page1', content: 'content of page1' }),
+ create(:wiki_page, wiki: project_wiki, attrs: { title: 'page2', content: 'content of page2' })]
end
it 'returns the list of wiki pages without content' do
@@ -445,7 +447,7 @@ describe API::Wikis do
end
describe 'PUT /projects/:id/wikis/:slug' do
- let(:page) { create(:wiki_page, wiki: project.wiki) }
+ let(:page) { create(:wiki_page, wiki: project_wiki) }
let(:payload) { { title: 'new title', content: 'new content' } }
let(:url) { "/projects/#{project.id}/wikis/#{page.slug}" }
@@ -568,10 +570,20 @@ describe API::Wikis do
end
end
end
+
+ context 'when wiki belongs to a group project' do
+ let(:project) { create(:project, namespace: group) }
+
+ before do
+ put(api(url, user), payload)
+ end
+
+ include_examples 'updates wiki page'
+ end
end
describe 'DELETE /projects/:id/wikis/:slug' do
- let(:page) { create(:wiki_page, wiki: project.wiki) }
+ let(:page) { create(:wiki_page, wiki: project_wiki) }
let(:url) { "/projects/#{project.id}/wikis/#{page.slug}" }
context 'when wiki is disabled' do
@@ -675,5 +687,15 @@ describe API::Wikis do
end
end
end
+
+ context 'when wiki belongs to a group project' do
+ let(:project) { create(:project, namespace: group) }
+
+ before do
+ delete(api(url, user))
+ end
+
+ include_examples '204 No Content'
+ end
end
end
diff --git a/spec/serializers/event_entity_spec.rb b/spec/serializers/event_entity_spec.rb
deleted file mode 100644
index bb54597c967..00000000000
--- a/spec/serializers/event_entity_spec.rb
+++ /dev/null
@@ -1,13 +0,0 @@
-require 'spec_helper'
-
-describe EventEntity do
- subject { described_class.represent(create(:event)).as_json }
-
- it 'exposes author' do
- expect(subject).to include(:author)
- end
-
- it 'exposes core elements of event' do
- expect(subject).to include(:updated_at)
- end
-end
diff --git a/spec/serializers/merge_request_widget_entity_spec.rb b/spec/serializers/merge_request_widget_entity_spec.rb
index a5924a8589c..e25552eb0d8 100644
--- a/spec/serializers/merge_request_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_widget_entity_spec.rb
@@ -35,6 +35,81 @@ describe MergeRequestWidgetEntity do
end
end
+ describe 'metrics' do
+ context 'when metrics record exists with merged data' do
+ before do
+ resource.mark_as_merged!
+ resource.metrics.update!(merged_by: user)
+ end
+
+ it 'matches merge request metrics schema' do
+ expect(subject[:metrics].with_indifferent_access)
+ .to match_schema('entities/merge_request_metrics')
+ end
+
+ it 'returns values from metrics record' do
+ expect(subject.dig(:metrics, :merged_by, :id))
+ .to eq(resource.metrics.merged_by_id)
+ end
+ end
+
+ context 'when metrics record exists with closed data' do
+ before do
+ resource.close!
+ resource.metrics.update!(latest_closed_by: user)
+ end
+
+ it 'matches merge request metrics schema' do
+ expect(subject[:metrics].with_indifferent_access)
+ .to match_schema('entities/merge_request_metrics')
+ end
+
+ it 'returns values from metrics record' do
+ expect(subject.dig(:metrics, :closed_by, :id))
+ .to eq(resource.metrics.latest_closed_by_id)
+ end
+ end
+
+ context 'when metrics record does not exist' do
+ before do
+ resource.mark_as_merged!
+ resource.metrics.destroy!
+ resource.reload
+ end
+
+ context 'when events exist' do
+ let!(:closed_event) { create(:event, :closed, project: project, target: resource) }
+ let!(:merge_event) { create(:event, :merged, project: project, target: resource) }
+
+ it 'matches merge request metrics schema' do
+ expect(subject[:metrics].with_indifferent_access)
+ .to match_schema('entities/merge_request_metrics')
+ end
+
+ it 'returns values from events record' do
+ expect(subject.dig(:metrics, :merged_by, :id))
+ .to eq(merge_event.author_id)
+
+ expect(subject.dig(:metrics, :closed_by, :id))
+ .to eq(closed_event.author_id)
+
+ expect(subject.dig(:metrics, :merged_at).to_s)
+ .to eq(merge_event.updated_at.to_s)
+
+ expect(subject.dig(:metrics, :closed_at).to_s)
+ .to eq(closed_event.updated_at.to_s)
+ end
+ end
+
+ context 'when events do not exist' do
+ it 'matches merge request metrics schema' do
+ expect(subject[:metrics].with_indifferent_access)
+ .to match_schema('entities/merge_request_metrics')
+ end
+ end
+ end
+ end
+
it 'has email_patches_path' do
expect(subject[:email_patches_path])
.to eq("/#{resource.project.full_path}/merge_requests/#{resource.iid}.patch")
diff --git a/spec/services/create_deployment_service_spec.rb b/spec/services/create_deployment_service_spec.rb
index 08267d6e6a0..b9bfbb11511 100644
--- a/spec/services/create_deployment_service_spec.rb
+++ b/spec/services/create_deployment_service_spec.rb
@@ -266,7 +266,7 @@ describe CreateDeploymentService do
context "while updating the 'first_deployed_to_production_at' time" do
before do
- merge_request.mark_as_merged
+ merge_request.metrics.update!(merged_at: Time.now)
end
context "for merge requests merged before the current deploy" do
diff --git a/spec/services/merge_requests/close_service_spec.rb b/spec/services/merge_requests/close_service_spec.rb
index 2a59bc4594a..4d12de3ecce 100644
--- a/spec/services/merge_requests/close_service_spec.rb
+++ b/spec/services/merge_requests/close_service_spec.rb
@@ -52,6 +52,19 @@ describe MergeRequests::CloseService do
end
end
+ it 'updates metrics' do
+ metrics = merge_request.metrics
+ metrics_service = double(MergeRequestMetricsService)
+ allow(MergeRequestMetricsService)
+ .to receive(:new)
+ .with(metrics)
+ .and_return(metrics_service)
+
+ expect(metrics_service).to receive(:close)
+
+ described_class.new(project, user, {}).execute(merge_request)
+ end
+
it 'refreshes the number of open merge requests for a valid MR', :use_clean_rails_memory_store_caching do
service = described_class.new(project, user, {})
diff --git a/spec/services/merge_requests/conflicts/list_service_spec.rb b/spec/services/merge_requests/conflicts/list_service_spec.rb
index 0b32c51a16f..6cadcd438c3 100644
--- a/spec/services/merge_requests/conflicts/list_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/list_service_spec.rb
@@ -32,14 +32,6 @@ describe MergeRequests::Conflicts::ListService do
expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_falsey
end
- it 'returns a falsey value when the MR has a missing ref after a force push' do
- merge_request = create_merge_request('conflict-resolvable')
- service = conflicts_service(merge_request)
- allow_any_instance_of(Rugged::Repository).to receive(:merge_commits).and_raise(Rugged::OdbError)
-
- expect(service.can_be_resolved_in_ui?).to be_falsey
- end
-
it 'returns a falsey value when the MR does not support new diff notes' do
merge_request = create_merge_request('conflict-resolvable')
merge_request.merge_request_diff.update_attributes(start_commit_sha: nil)
@@ -76,5 +68,23 @@ describe MergeRequests::Conflicts::ListService do
expect(conflicts_service(merge_request).can_be_resolved_in_ui?).to be_truthy
end
+
+ it 'returns a falsey value when the MR has a missing ref after a force push' do
+ merge_request = create_merge_request('conflict-resolvable')
+ service = conflicts_service(merge_request)
+ allow_any_instance_of(Gitlab::GitalyClient::ConflictsService).to receive(:list_conflict_files).and_raise(GRPC::Unknown)
+
+ expect(service.can_be_resolved_in_ui?).to be_falsey
+ end
+
+ context 'with gitaly disabled', :skip_gitaly_mock do
+ it 'returns a falsey value when the MR has a missing ref after a force push' do
+ merge_request = create_merge_request('conflict-resolvable')
+ service = conflicts_service(merge_request)
+ allow_any_instance_of(Rugged::Repository).to receive(:merge_commits).and_raise(Rugged::OdbError)
+
+ expect(service.can_be_resolved_in_ui?).to be_falsey
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/conflicts/resolve_service_spec.rb b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
index e28d8d7ae5c..cff09237005 100644
--- a/spec/services/merge_requests/conflicts/resolve_service_spec.rb
+++ b/spec/services/merge_requests/conflicts/resolve_service_spec.rb
@@ -111,15 +111,6 @@ describe MergeRequests::Conflicts::ResolveService do
described_class.new(merge_request_from_fork).execute(user, params)
end
- it 'gets conflicts from the source project' do
- # REFACTOR NOTE: We used to test that `project.repository.rugged` wasn't
- # used in this case, but since the refactor, for simplification,
- # we always use that repository for read only operations.
- expect(forked_project.repository.rugged).to receive(:merge_commits).and_call_original
-
- subject
- end
-
it 'creates a commit with the message' do
subject
@@ -132,6 +123,17 @@ describe MergeRequests::Conflicts::ResolveService do
expect(merge_request_from_fork.source_branch_head.parents.map(&:id))
.to eq(['404fa3fc7c2c9b5dacff102f353bdf55b1be2813', target_head])
end
+
+ context 'when gitaly is disabled', :skip_gitaly_mock do
+ it 'gets conflicts from the source project' do
+ # REFACTOR NOTE: We used to test that `project.repository.rugged` wasn't
+ # used in this case, but since the refactor, for simplification,
+ # we always use that repository for read only operations.
+ expect(forked_project.repository.rugged).to receive(:merge_commits).and_call_original
+
+ subject
+ end
+ end
end
end
diff --git a/spec/services/merge_requests/post_merge_service_spec.rb b/spec/services/merge_requests/post_merge_service_spec.rb
index 8f2c5df5907..70957431942 100644
--- a/spec/services/merge_requests/post_merge_service_spec.rb
+++ b/spec/services/merge_requests/post_merge_service_spec.rb
@@ -22,5 +22,18 @@ describe MergeRequests::PostMergeService do
expect { service.execute(merge_request) }
.to change { project.open_merge_requests_count }.from(1).to(0)
end
+
+ it 'updates metrics' do
+ metrics = merge_request.metrics
+ metrics_service = double(MergeRequestMetricsService)
+ allow(MergeRequestMetricsService)
+ .to receive(:new)
+ .with(metrics)
+ .and_return(metrics_service)
+
+ expect(metrics_service).to receive(:merge)
+
+ described_class.new(project, user, {}).execute(merge_request)
+ end
end
end
diff --git a/spec/services/merge_requests/reopen_service_spec.rb b/spec/services/merge_requests/reopen_service_spec.rb
index 94f31ff139c..a44d63e5f9f 100644
--- a/spec/services/merge_requests/reopen_service_spec.rb
+++ b/spec/services/merge_requests/reopen_service_spec.rb
@@ -47,6 +47,19 @@ describe MergeRequests::ReopenService do
end
end
+ it 'updates metrics' do
+ metrics = merge_request.metrics
+ service = double(MergeRequestMetricsService)
+ allow(MergeRequestMetricsService)
+ .to receive(:new)
+ .with(metrics)
+ .and_return(service)
+
+ expect(service).to receive(:reopen)
+
+ described_class.new(project, user, {}).execute(merge_request)
+ end
+
it 'refreshes the number of open merge requests for a valid MR' do
service = described_class.new(project, user, {})
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index dc89fdebce7..1833078f37c 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -252,6 +252,12 @@ describe Projects::CreateService, '#execute' do
end
end
+ it 'writes project full path to .git/config' do
+ project = create_project(user, opts)
+
+ expect(project.repo.config['gitlab.fullpath']).to eq project.full_path
+ end
+
def create_project(user, opts)
Projects::CreateService.new(user, opts).execute
end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index 4057caca2ac..409d5de8d43 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -139,10 +139,10 @@ describe Projects::ForkService do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::INTERNAL])
end
- it "creates fork with highest allowed level" do
+ it "creates fork with lowest level" do
forked_project = fork_project(@from_project, @to_user)
- expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PUBLIC)
+ expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
end
end
@@ -209,6 +209,19 @@ describe Projects::ForkService do
expect(to_project.errors[:path]).to eq(['has already been taken'])
end
end
+
+ context 'when the namespace has a lower visibility level than the project' do
+ it 'creates the project with the lower visibility level' do
+ public_project = create(:project, :public)
+ private_group = create(:group, :private)
+ group_owner = create(:user)
+ private_group.add_owner(group_owner)
+
+ forked_project = fork_project(public_project, group_owner, namespace: private_group)
+
+ expect(forked_project.visibility_level).to eq(Gitlab::VisibilityLevel::PRIVATE)
+ end
+ end
end
end
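
The new fork service expectation is that a fork can never be more visible than the namespace it lands in: the resulting level is the lower of the source project's and the target namespace's levels. A self-contained sketch of that rule follows, using plain integers in place of the Gitlab::VisibilityLevel constants (0 = PRIVATE, 10 = INTERNAL, 20 = PUBLIC); the method name is hypothetical, not GitLab's API.

# Illustrative sketch only: clamp a fork's visibility to its target namespace.
PRIVATE  = 0
INTERNAL = 10
PUBLIC   = 20

def fork_visibility(project_level, namespace_level)
  [project_level, namespace_level].min
end

fork_visibility(PUBLIC, PRIVATE) # => 0, i.e. PRIVATE, as the spec above expects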
diff --git a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
index 3a3e47fd9c0..ded864beb1d 100644
--- a/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
+++ b/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb
@@ -2,7 +2,7 @@ require 'spec_helper'
describe Projects::HashedStorage::MigrateRepositoryService do
let(:gitlab_shell) { Gitlab::Shell.new }
- let(:project) { create(:project, :empty_repo, :wiki_repo) }
+ let(:project) { create(:project, :repository, :wiki_repo) }
let(:service) { described_class.new(project) }
let(:legacy_storage) { Storage::LegacyProject.new(project) }
let(:hashed_storage) { Storage::HashedProject.new(project) }
@@ -33,6 +33,12 @@ describe Projects::HashedStorage::MigrateRepositoryService do
service.execute
end
+
+ it 'writes project full path to .git/config' do
+ service.execute
+
+ expect(project.repo.config['gitlab.fullpath']).to eq project.full_path
+ end
end
context 'when one move fails' do
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 2b1337bee7e..7377c748698 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -54,6 +54,12 @@ describe Projects::TransferService do
expect(project.disk_path).not_to eq(old_path)
expect(project.disk_path).to start_with(group.path)
end
+
+ it 'updates project full path in .git/config' do
+ transfer_project(project, user, group)
+
+ expect(project.repo.config['gitlab.fullpath']).to eq "#{group.full_path}/#{project.path}"
+ end
end
context 'when transfer fails' do
@@ -86,6 +92,12 @@ describe Projects::TransferService do
expect(original_path).to eq current_path
end
+ it 'rolls back project full path in .git/config' do
+ attempt_project_transfer
+
+ expect(project.repo.config['gitlab.fullpath']).to eq project.full_path
+ end
+
it "doesn't send move notifications" do
expect_any_instance_of(NotificationService).not_to receive(:project_was_moved)
diff --git a/spec/services/projects/update_service_spec.rb b/spec/services/projects/update_service_spec.rb
index d887f70efae..fc6aa713d6f 100644
--- a/spec/services/projects/update_service_spec.rb
+++ b/spec/services/projects/update_service_spec.rb
@@ -61,7 +61,7 @@ describe Projects::UpdateService do
end
end
- context 'When project visibility is higher than parent group' do
+ context 'when project visibility is higher than parent group' do
let(:group) { create(:group, visibility_level: Gitlab::VisibilityLevel::INTERNAL) }
before do
diff --git a/spec/services/system_note_service_spec.rb b/spec/services/system_note_service_spec.rb
index 9025589ae0b..4e640a82dfc 100644
--- a/spec/services/system_note_service_spec.rb
+++ b/spec/services/system_note_service_spec.rb
@@ -2,6 +2,7 @@ require 'spec_helper'
describe SystemNoteService do
include Gitlab::Routing
+ include RepoHelpers
set(:group) { create(:group) }
set(:project) { create(:project, :repository, group: group) }
@@ -1070,17 +1071,32 @@ describe SystemNoteService do
let(:action) { 'outdated' }
end
- it 'creates a new note in the discussion' do
- # we need to completely rebuild the merge request object, or the `@discussions` on the merge request are not reloaded.
- expect { subject }.to change { reloaded_merge_request.discussions.first.notes.size }.by(1)
+ context 'when the change_position is valid for the discussion' do
+ it 'creates a new note in the discussion' do
+ # we need to completely rebuild the merge request object, or the `@discussions` on the merge request are not reloaded.
+ expect { subject }.to change { reloaded_merge_request.discussions.first.notes.size }.by(1)
+ end
+
+ it 'links to the diff in the system note' do
+ expect(subject.note).to include('version 1')
+
+ diff_id = merge_request.merge_request_diff.id
+ line_code = change_position.line_code(project.repository)
+ expect(subject.note).to include(diffs_project_merge_request_url(project, merge_request, diff_id: diff_id, anchor: line_code))
+ end
end
- it 'links to the diff in the system note' do
- expect(subject.note).to include('version 1')
+ context 'when the change_position is invalid for the discussion' do
+ let(:change_position) { project.commit(sample_commit.id) }
- diff_id = merge_request.merge_request_diff.id
- line_code = change_position.line_code(project.repository)
- expect(subject.note).to include(diffs_project_merge_request_url(project, merge_request, diff_id: diff_id, anchor: line_code))
+ it 'creates a new note in the discussion' do
+ # we need to completely rebuild the merge request object, or the `@discussions` on the merge request are not reloaded.
+ expect { subject }.to change { reloaded_merge_request.discussions.first.notes.size }.by(1)
+ end
+
+ it 'does not create a link' do
+ expect(subject.note).to eq('changed this line in version 1 of the diff')
+ end
end
end
diff --git a/spec/services/update_merge_request_metrics_service_spec.rb b/spec/services/update_merge_request_metrics_service_spec.rb
new file mode 100644
index 00000000000..b5fb999381d
--- /dev/null
+++ b/spec/services/update_merge_request_metrics_service_spec.rb
@@ -0,0 +1,42 @@
+require 'rails_helper'
+
+describe MergeRequestMetricsService do
+ let(:metrics) { create(:merge_request).metrics }
+
+ describe '#merge' do
+ it 'updates metrics' do
+ user = create(:user)
+ service = described_class.new(metrics)
+ event = double(Event, author_id: user.id, created_at: Time.now)
+
+ service.merge(event)
+
+ expect(metrics.merged_by).to eq(user)
+ expect(metrics.merged_at).to eq(event.created_at)
+ end
+ end
+
+ describe '#close' do
+ it 'updates metrics' do
+ user = create(:user)
+ service = described_class.new(metrics)
+ event = double(Event, author_id: user.id, created_at: Time.now)
+
+ service.close(event)
+
+ expect(metrics.latest_closed_by).to eq(user)
+ expect(metrics.latest_closed_at).to eq(event.created_at)
+ end
+ end
+
+ describe '#reopen' do
+ it 'updates metrics' do
+ service = described_class.new(metrics)
+
+ service.reopen
+
+ expect(metrics.latest_closed_by).to be_nil
+ expect(metrics.latest_closed_at).to be_nil
+ end
+ end
+end
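
The new spec above pins down the contract of MergeRequestMetricsService: #merge and #close copy the author and timestamp from an event onto the metrics record, and #reopen clears the close data. A minimal sketch consistent with those expectations is given below; the column names are the ones the spec asserts on, and everything else is an assumption rather than the shipped implementation.

# Sketch only: assumes `metrics` is an ActiveRecord-like object exposing
# update! with the columns asserted on in the spec above.
class MergeRequestMetricsService
  def initialize(metrics)
    @metrics = metrics
  end

  def merge(event)
    @metrics.update!(merged_by_id: event.author_id, merged_at: event.created_at)
  end

  def close(event)
    @metrics.update!(latest_closed_by_id: event.author_id, latest_closed_at: event.created_at)
  end

  def reopen
    @metrics.update!(latest_closed_by_id: nil, latest_closed_at: nil)
  end
end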
diff --git a/spec/services/users/destroy_service_spec.rb b/spec/services/users/destroy_service_spec.rb
index 58a5bede3de..aeba9cd60bc 100644
--- a/spec/services/users/destroy_service_spec.rb
+++ b/spec/services/users/destroy_service_spec.rb
@@ -188,5 +188,22 @@ describe Users::DestroyService do
end
end
end
+
+ describe "calls the before/after callbacks" do
+ it 'of project_members' do
+ expect_any_instance_of(ProjectMember).to receive(:run_callbacks).with(:destroy).once
+
+ service.execute(user)
+ end
+
+ it 'of group_members' do
+ group_member = create(:group_member)
+ group_member.group.group_members.create(user: user, access_level: 40)
+
+ expect_any_instance_of(GroupMember).to receive(:run_callbacks).with(:destroy).once
+
+ service.execute(user)
+ end
+ end
end
end
diff --git a/spec/support/services_shared_context.rb b/spec/support/services_shared_context.rb
index 7457484a932..3f1fd169b72 100644
--- a/spec/support/services_shared_context.rb
+++ b/spec/support/services_shared_context.rb
@@ -29,5 +29,13 @@ Service.available_services_names.each do |service|
end
end
end
+
+ def initialize_service(service)
+ service_item = project.find_or_initialize_service(service)
+ service_item.properties = service_attrs
+ service_item.active = true if service == "kubernetes"
+ service_item.save
+ service_item
+ end
end
end
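
The shared context above gains an initialize_service helper that builds a service on the project under test, applies the context's service_attrs, and force-activates Kubernetes. A hedged usage sketch follows; the example wording and the "jira" name are illustrative, and project and service_attrs are assumed to be provided by the shared context.

# Illustrative only: how a spec including this shared context might use the helper.
it 'persists the service with the shared attributes' do
  service_item = initialize_service('jira')

  expect(service_item).to be_persisted
  expect(service_item.properties).to eq(service_attrs)
end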
diff --git a/spec/support/test_env.rb b/spec/support/test_env.rb
index ffc051a3fff..1d99746b09f 100644
--- a/spec/support/test_env.rb
+++ b/spec/support/test_env.rb
@@ -215,7 +215,7 @@ module TestEnv
end
def copy_repo(project, bare_repo:, refs:)
- target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.full_path}.git")
+ target_repo_path = File.expand_path(project.repository_storage_path + "/#{project.disk_path}.git")
FileUtils.mkdir_p(target_repo_path)
FileUtils.cp_r("#{File.expand_path(bare_repo)}/.", target_repo_path)
FileUtils.chmod_R 0755, target_repo_path