Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2020-03-13 15:09:22 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2020-03-13 15:09:22 +0300
commit286fe61013674fe2d245ffc8d2233baf09923e70 (patch)
tree2037291f5863105e54e75be056b49f7d62007cae /spec
parent4cb5e5011abfe8d50ac3a7ebd0018c563c6d7af4 (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec')
-rw-r--r--spec/controllers/admin/sessions_controller_spec.rb116
-rw-r--r--spec/controllers/projects/import/jira_controller_spec.rb173
-rw-r--r--spec/features/admin/admin_mode/login_spec.rb184
-rw-r--r--spec/features/admin/admin_mode/logout_spec.rb42
-rw-r--r--spec/features/admin/admin_mode_spec.rb4
-rw-r--r--spec/frontend/ide/components/commit_sidebar/editor_header_spec.js27
-rw-r--r--spec/frontend/logs/components/environment_logs_spec.js210
-rw-r--r--spec/frontend/logs/components/log_control_buttons_spec.js50
-rw-r--r--spec/frontend/logs/mock_data.js70
-rw-r--r--spec/frontend/logs/stores/actions_spec.js332
-rw-r--r--spec/frontend/logs/stores/mutations_spec.js136
-rw-r--r--spec/frontend/vue_shared/components/changed_file_icon_spec.js8
-rw-r--r--spec/graphql/mutations/concerns/mutations/resolves_group_spec.rb4
-rw-r--r--spec/graphql/mutations/concerns/mutations/resolves_issuable_spec.rb6
-rw-r--r--spec/graphql/mutations/concerns/mutations/resolves_project_spec.rb4
-rw-r--r--spec/graphql/mutations/issues/set_confidential_spec.rb2
-rw-r--r--spec/graphql/mutations/issues/set_due_date_spec.rb2
-rw-r--r--spec/graphql/mutations/issues/update_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_assignees_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_labels_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_locked_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_milestone_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_subscription_spec.rb2
-rw-r--r--spec/graphql/mutations/merge_requests/set_wip_spec.rb2
-rw-r--r--spec/graphql/mutations/todos/mark_all_done_spec.rb2
-rw-r--r--spec/graphql/mutations/todos/mark_done_spec.rb2
-rw-r--r--spec/graphql/mutations/todos/restore_many_spec.rb2
-rw-r--r--spec/graphql/mutations/todos/restore_spec.rb2
-rw-r--r--spec/helpers/releases_helper_spec.rb21
-rw-r--r--spec/javascripts/ide/components/commit_sidebar/form_spec.js2
-rw-r--r--spec/javascripts/ide/components/file_row_extra_spec.js30
-rw-r--r--spec/javascripts/ide/components/repo_commit_section_spec.js14
-rw-r--r--spec/javascripts/releases/components/app_index_spec.js123
-rw-r--r--spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb113
-rw-r--r--spec/lib/gitlab/sidekiq_queue_spec.rb14
-rw-r--r--spec/migrations/schedule_link_lfs_objects_projects_spec.rb76
-rw-r--r--spec/models/concerns/bulk_insert_safe_spec.rb31
-rw-r--r--spec/requests/api/admin/sidekiq_spec.rb12
-rw-r--r--spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb12
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb1
-rw-r--r--spec/support/helpers/graphql_helpers.rb4
-rw-r--r--spec/support/helpers/login_helpers.rb13
-rw-r--r--spec/views/admin/sessions/new.html.haml_spec.rb2
-rw-r--r--spec/views/admin/sessions/two_factor.html.haml_spec.rb41
-rw-r--r--spec/workers/authorized_projects_worker_spec.rb17
45 files changed, 1481 insertions, 437 deletions
diff --git a/spec/controllers/admin/sessions_controller_spec.rb b/spec/controllers/admin/sessions_controller_spec.rb
index 4bab6b51102..fabd79133ec 100644
--- a/spec/controllers/admin/sessions_controller_spec.rb
+++ b/spec/controllers/admin/sessions_controller_spec.rb
@@ -68,7 +68,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
# triggering the auth form will request admin mode
get :new
- post :create, params: { password: user.password }
+ post :create, params: { user: { password: user.password } }
expect(response).to redirect_to admin_root_path
expect(controller.current_user_mode.admin_mode?).to be(true)
@@ -82,7 +82,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
# triggering the auth form will request admin mode
get :new
- post :create, params: { password: '' }
+ post :create, params: { user: { password: '' } }
expect(response).to render_template :new
expect(controller.current_user_mode.admin_mode?).to be(false)
@@ -95,7 +95,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
# do not trigger the auth form
- post :create, params: { password: user.password }
+ post :create, params: { user: { password: user.password } }
expect(response).to redirect_to(new_admin_session_path)
expect(controller.current_user_mode.admin_mode?).to be(false)
@@ -110,12 +110,118 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
get :new
Timecop.freeze(Gitlab::Auth::CurrentUserMode::ADMIN_MODE_REQUESTED_GRACE_PERIOD.from_now) do
- post :create, params: { password: user.password }
+ post :create, params: { user: { password: user.password } }
expect(response).to redirect_to(new_admin_session_path)
expect(controller.current_user_mode.admin_mode?).to be(false)
end
end
+
+ context 'when using two-factor authentication via OTP' do
+ let(:user) { create(:admin, :two_factor) }
+
+ def authenticate_2fa(user_params)
+ post(:create, params: { user: user_params }, session: { otp_user_id: user.id })
+ end
+
+ it 'requests two factor after a valid password is provided' do
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ # triggering the auth form will request admin mode
+ get :new
+
+ post :create, params: { user: { password: user.password } }
+
+ expect(response).to render_template('admin/sessions/two_factor')
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+ end
+
+ it 'can login with valid otp' do
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ controller.store_location_for(:redirect, admin_root_path)
+ controller.current_user_mode.request_admin_mode!
+
+ authenticate_2fa(otp_attempt: user.current_otp)
+
+ expect(response).to redirect_to admin_root_path
+ expect(controller.current_user_mode.admin_mode?).to be(true)
+ end
+
+ it 'cannot login with invalid otp' do
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ controller.current_user_mode.request_admin_mode!
+
+ authenticate_2fa(otp_attempt: 'invalid')
+
+ expect(response).to render_template('admin/sessions/two_factor')
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+ end
+
+ context 'with password authentication disabled' do
+ before do
+ stub_application_setting(password_authentication_enabled_for_web: false)
+ end
+
+ it 'allows 2FA stage of non-password login' do
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ controller.store_location_for(:redirect, admin_root_path)
+ controller.current_user_mode.request_admin_mode!
+
+ authenticate_2fa(otp_attempt: user.current_otp)
+
+ expect(response).to redirect_to admin_root_path
+ expect(controller.current_user_mode.admin_mode?).to be(true)
+ end
+ end
+ end
+
+ context 'when using two-factor authentication via U2F' do
+ let(:user) { create(:admin, :two_factor_via_u2f) }
+
+ def authenticate_2fa_u2f(user_params)
+ post(:create, params: { user: user_params }, session: { otp_user_id: user.id })
+ end
+
+ it 'requests two factor after a valid password is provided' do
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ # triggering the auth form will request admin mode
+ get :new
+ post :create, params: { user: { password: user.password } }
+
+ expect(response).to render_template('admin/sessions/two_factor')
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+ end
+
+ it 'can login with valid auth' do
+ allow(U2fRegistration).to receive(:authenticate).and_return(true)
+
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ controller.store_location_for(:redirect, admin_root_path)
+ controller.current_user_mode.request_admin_mode!
+
+ authenticate_2fa_u2f(login: user.username, device_response: '{}')
+
+ expect(response).to redirect_to admin_root_path
+ expect(controller.current_user_mode.admin_mode?).to be(true)
+ end
+
+ it 'cannot login with invalid auth' do
+ allow(U2fRegistration).to receive(:authenticate).and_return(false)
+
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+
+ controller.current_user_mode.request_admin_mode!
+ authenticate_2fa_u2f(login: user.username, device_response: '{}')
+
+ expect(response).to render_template('admin/sessions/two_factor')
+ expect(controller.current_user_mode.admin_mode?).to be(false)
+ end
+ end
end
end
@@ -136,7 +242,7 @@ describe Admin::SessionsController, :do_not_mock_admin_mode do
expect(controller.current_user_mode.admin_mode?).to be(false)
get :new
- post :create, params: { password: user.password }
+ post :create, params: { user: { password: user.password } }
expect(controller.current_user_mode.admin_mode?).to be(true)
post :destroy
diff --git a/spec/controllers/projects/import/jira_controller_spec.rb b/spec/controllers/projects/import/jira_controller_spec.rb
new file mode 100644
index 00000000000..9d68104b755
--- /dev/null
+++ b/spec/controllers/projects/import/jira_controller_spec.rb
@@ -0,0 +1,173 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Projects::Import::JiraController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+
+ context 'with anonymous user' do
+ before do
+ stub_feature_flags(jira_issue_import: true)
+ end
+
+ context 'get show' do
+ it 'redirects to issues page' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+
+ context 'post import' do
+ it 'redirects to issues page' do
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'Test' }
+
+ expect(response).to redirect_to(new_user_session_path)
+ end
+ end
+ end
+
+ context 'with logged in user' do
+ before do
+ sign_in(user)
+ project.add_maintainer(user)
+ end
+
+ context 'when feature flag not enabled' do
+ before do
+ stub_feature_flags(jira_issue_import: false)
+ end
+
+ context 'get show' do
+ it 'redirects to issues page' do
+ get :show, params: { namespace_id: project.namespace, project_id: project }
+
+ expect(response).to redirect_to(project_issues_path(project))
+ end
+ end
+
+ context 'post import' do
+ it 'redirects to issues page' do
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'Test' }
+
+ expect(response).to redirect_to(project_issues_path(project))
+ end
+ end
+ end
+
+ context 'when feature flag enabled' do
+ before do
+ stub_feature_flags(jira_issue_import: true)
+ end
+
+ context 'when jira service is enabled for the project' do
+ let_it_be(:jira_service) { create(:jira_service, project: project) }
+
+ context 'when running jira import first time' do
+ context 'get show' do
+ it 'renders show template' do
+ allow(JIRA::Resource::Project).to receive(:all).and_return([])
+ expect(project.import_state).to be_nil
+
+ get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
+
+ expect(response).to render_template :show
+ end
+ end
+
+ context 'post import' do
+ it 'creates import state' do
+ expect(project.import_state).to be_nil
+
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'Test' }
+
+ project.reload
+
+ jira_project = project.import_data.data.dig('jira', 'projects').first
+ expect(project.import_type).to eq 'jira'
+ expect(project.import_state.status).to eq 'scheduled'
+ expect(jira_project['key']).to eq 'Test'
+ expect(response).to redirect_to(project_import_jira_path(project))
+ end
+ end
+ end
+
+ context 'when import state is scheduled' do
+ let_it_be(:import_state) { create(:import_state, project: project, status: :scheduled) }
+
+ context 'get show' do
+ it 'renders import status' do
+ get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
+
+ expect(project.import_state.status).to eq 'scheduled'
+ expect(flash.now[:notice]).to eq 'Import scheduled'
+ end
+ end
+
+ context 'post import' do
+ before do
+ project.reload
+ project.create_import_data(
+ data: {
+ 'jira': {
+ 'projects': [{ 'key': 'Test', scheduled_at: 5.days.ago, scheduled_by: { user_id: user.id, name: user.name } }]
+ }
+ }
+ )
+ end
+
+ it 'uses the existing import data' do
+ expect(controller).not_to receive(:schedule_import)
+
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'New Project' }
+
+ expect(response).to redirect_to(project_import_jira_path(project))
+ end
+ end
+ end
+
+ context 'when jira import ran before' do
+ let_it_be(:import_state) { create(:import_state, project: project, status: :finished) }
+
+ context 'get show' do
+ it 'renders import status' do
+ allow(JIRA::Resource::Project).to receive(:all).and_return([])
+ get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
+
+ expect(project.import_state.status).to eq 'finished'
+ expect(flash.now[:notice]).to eq 'Import finished'
+ end
+ end
+
+ context 'post import' do
+ before do
+ project.reload
+ project.create_import_data(
+ data: {
+ 'jira': {
+ 'projects': [{ 'key': 'Test', scheduled_at: 5.days.ago, scheduled_by: { user_id: user.id, name: user.name } }]
+ }
+ }
+ )
+ end
+
+ it 'uses the existing import data' do
+ expect(controller).to receive(:schedule_import).and_call_original
+
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'New Project' }
+
+ project.reload
+ expect(project.import_state.status).to eq 'scheduled'
+ jira_imported_projects = project.import_data.data.dig('jira', 'projects')
+ expect(jira_imported_projects.size).to eq 2
+ expect(jira_imported_projects.first['key']).to eq 'Test'
+ expect(jira_imported_projects.last['key']).to eq 'New Project'
+ expect(response).to redirect_to(project_import_jira_path(project))
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/admin/admin_mode/login_spec.rb b/spec/features/admin/admin_mode/login_spec.rb
new file mode 100644
index 00000000000..b8a910d3a40
--- /dev/null
+++ b/spec/features/admin/admin_mode/login_spec.rb
@@ -0,0 +1,184 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Admin Mode Login', :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode do
+ include TermsHelper
+ include UserLoginHelper
+
+ describe 'with two-factor authentication', :js do
+ def enter_code(code)
+ fill_in 'user_otp_attempt', with: code
+ click_button 'Verify code'
+ end
+
+ context 'with valid username/password' do
+ let(:user) { create(:admin, :two_factor) }
+
+ context 'using one-time code' do
+ it 'blocks login if we reuse the same code immediately' do
+ gitlab_sign_in(user, remember: true)
+
+ expect(page).to have_content('Two-Factor Authentication')
+
+ repeated_otp = user.current_otp
+ enter_code(repeated_otp)
+ gitlab_enable_admin_mode_sign_in(user)
+
+ expect(page).to have_content('Two-Factor Authentication')
+
+ enter_code(repeated_otp)
+
+ expect(current_path).to eq admin_session_path
+ expect(page).to have_content('Invalid two-factor code')
+ end
+
+ context 'not re-using codes' do
+ before do
+ gitlab_sign_in(user, remember: true)
+
+ expect(page).to have_content('Two-Factor Authentication')
+
+ enter_code(user.current_otp)
+ gitlab_enable_admin_mode_sign_in(user)
+
+ expect(page).to have_content('Two-Factor Authentication')
+ end
+
+ it 'allows login with valid code' do
+ # Cannot reuse the TOTP
+ Timecop.travel(30.seconds.from_now) do
+ enter_code(user.current_otp)
+
+ expect(current_path).to eq admin_root_path
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
+
+ it 'blocks login with invalid code' do
+ # Cannot reuse the TOTP
+ Timecop.travel(30.seconds.from_now) do
+ enter_code('foo')
+
+ expect(page).to have_content('Invalid two-factor code')
+ end
+ end
+
+ it 'allows login with invalid code, then valid code' do
+ # Cannot reuse the TOTP
+ Timecop.travel(30.seconds.from_now) do
+ enter_code('foo')
+
+ expect(page).to have_content('Invalid two-factor code')
+
+ enter_code(user.current_otp)
+
+ expect(current_path).to eq admin_root_path
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
+
+ context 'using backup code' do
+ let(:codes) { user.generate_otp_backup_codes! }
+
+ before do
+ expect(codes.size).to eq 10
+
+ # Ensure the generated codes get saved
+ user.save
+ end
+
+ context 'with valid code' do
+ it 'allows login' do
+ enter_code(codes.sample)
+
+ expect(current_path).to eq admin_root_path
+ expect(page).to have_content('Admin mode enabled')
+ end
+
+ it 'invalidates the used code' do
+ expect { enter_code(codes.sample) }
+ .to change { user.reload.otp_backup_codes.size }.by(-1)
+ end
+ end
+
+ context 'with invalid code' do
+ it 'blocks login' do
+ code = codes.sample
+ expect(user.invalidate_otp_backup_code!(code)).to eq true
+
+ user.save!
+ expect(user.reload.otp_backup_codes.size).to eq 9
+
+ enter_code(code)
+
+ expect(page).to have_content('Invalid two-factor code.')
+ end
+ end
+ end
+ end
+ end
+
+ context 'when logging in via omniauth' do
+ let(:user) { create(:omniauth_user, :admin, :two_factor, extern_uid: 'my-uid', provider: 'saml')}
+ let(:mock_saml_response) do
+ File.read('spec/fixtures/authentication/saml_response.xml')
+ end
+
+ before do
+ stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'],
+ providers: [mock_saml_config_with_upstream_two_factor_authn_contexts])
+ end
+
+ context 'when authn_context is worth two factors' do
+ let(:mock_saml_response) do
+ File.read('spec/fixtures/authentication/saml_response.xml')
+ .gsub('urn:oasis:names:tc:SAML:2.0:ac:classes:Password',
+ 'urn:oasis:names:tc:SAML:2.0:ac:classes:SecondFactorOTPSMS')
+ end
+
+ it 'signs user in without prompting for second factor' do
+ sign_in_using_saml!
+
+ expect(page).not_to have_content('Two-Factor Authentication')
+
+ enable_admin_mode_using_saml!
+
+ expect(page).not_to have_content('Two-Factor Authentication')
+ expect(current_path).to eq admin_root_path
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
+
+ context 'when two factor authentication is required' do
+ it 'shows 2FA prompt after omniauth login' do
+ sign_in_using_saml!
+
+ expect(page).to have_content('Two-Factor Authentication')
+ enter_code(user.current_otp)
+
+ enable_admin_mode_using_saml!
+
+ expect(page).to have_content('Two-Factor Authentication')
+
+ # Cannot reuse the TOTP
+ Timecop.travel(30.seconds.from_now) do
+ enter_code(user.current_otp)
+
+ expect(current_path).to eq admin_root_path
+ expect(page).to have_content('Admin mode enabled')
+ end
+ end
+ end
+
+ def sign_in_using_saml!
+ gitlab_sign_in_via('saml', user, 'my-uid', mock_saml_response)
+ end
+
+ def enable_admin_mode_using_saml!
+ gitlab_enable_admin_mode_sign_in_via('saml', user, 'my-uid', mock_saml_response)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/admin/admin_mode/logout_spec.rb b/spec/features/admin/admin_mode/logout_spec.rb
new file mode 100644
index 00000000000..e1b4aba5724
--- /dev/null
+++ b/spec/features/admin/admin_mode/logout_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'Admin Mode Logout', :js, :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode do
+ include TermsHelper
+ include UserLoginHelper
+
+ let(:user) { create(:admin) }
+
+ before do
+ gitlab_sign_in(user)
+ gitlab_enable_admin_mode_sign_in(user)
+ visit admin_root_path
+ end
+
+ it 'disable removes admin mode and redirects to root page' do
+ gitlab_disable_admin_mode
+
+ expect(current_path).to eq root_path
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+
+ it 'disable shows flash notice' do
+ gitlab_disable_admin_mode
+
+ expect(page).to have_selector('.flash-notice')
+ end
+
+ context 'on a read-only instance' do
+ before do
+ allow(Gitlab::Database).to receive(:read_only?).and_return(true)
+ end
+
+ it 'disable removes admin mode and redirects to root page' do
+ gitlab_disable_admin_mode
+
+ expect(current_path).to eq root_path
+ expect(page).to have_link(href: new_admin_session_path)
+ end
+ end
+end
diff --git a/spec/features/admin/admin_mode_spec.rb b/spec/features/admin/admin_mode_spec.rb
index 7b8990aceef..f642d614a5d 100644
--- a/spec/features/admin/admin_mode_spec.rb
+++ b/spec/features/admin/admin_mode_spec.rb
@@ -45,7 +45,7 @@ describe 'Admin mode', :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode
it 'can enter admin mode' do
visit new_admin_session_path
- fill_in 'password', with: admin.password
+ fill_in 'user_password', with: admin.password
click_button 'Enter Admin Mode'
@@ -60,7 +60,7 @@ describe 'Admin mode', :clean_gitlab_redis_shared_state, :do_not_mock_admin_mode
it 'can enter admin mode' do
visit new_admin_session_path
- fill_in 'password', with: admin.password
+ fill_in 'user_password', with: admin.password
click_button 'Enter Admin Mode'
diff --git a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
index 054e7492429..a25aba61516 100644
--- a/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/editor_header_spec.js
@@ -14,7 +14,6 @@ describe('IDE commit editor header', () => {
const findDiscardModal = () => wrapper.find({ ref: 'discardModal' });
const findDiscardButton = () => wrapper.find({ ref: 'discardButton' });
- const findActionButton = () => wrapper.find({ ref: 'actionButton' });
beforeEach(() => {
f = file('file');
@@ -28,9 +27,7 @@ describe('IDE commit editor header', () => {
},
});
- jest.spyOn(wrapper.vm, 'stageChange').mockImplementation();
- jest.spyOn(wrapper.vm, 'unstageChange').mockImplementation();
- jest.spyOn(wrapper.vm, 'discardFileChanges').mockImplementation();
+ jest.spyOn(wrapper.vm, 'discardChanges').mockImplementation();
});
afterEach(() => {
@@ -38,8 +35,8 @@ describe('IDE commit editor header', () => {
wrapper = null;
});
- it('renders button to discard & stage', () => {
- expect(wrapper.vm.$el.querySelectorAll('.btn').length).toBe(2);
+ it('renders button to discard', () => {
+ expect(wrapper.vm.$el.querySelectorAll('.btn')).toHaveLength(1);
});
describe('discard button', () => {
@@ -60,23 +57,7 @@ describe('IDE commit editor header', () => {
it('calls discardFileChanges if dialog result is confirmed', () => {
modal.vm.$emit('ok');
- expect(wrapper.vm.discardFileChanges).toHaveBeenCalledWith(f.path);
- });
- });
-
- describe('stage/unstage button', () => {
- it('unstages the file if it was already staged', () => {
- f.staged = true;
-
- findActionButton().trigger('click');
-
- expect(wrapper.vm.unstageChange).toHaveBeenCalledWith(f.path);
- });
-
- it('stages the file if it was not staged', () => {
- findActionButton().trigger('click');
-
- expect(wrapper.vm.stageChange).toHaveBeenCalledWith(f.path);
+ expect(wrapper.vm.discardChanges).toHaveBeenCalledWith(f.path);
});
});
});
diff --git a/spec/frontend/logs/components/environment_logs_spec.js b/spec/frontend/logs/components/environment_logs_spec.js
index 26542c3d046..c638b4c05f9 100644
--- a/spec/frontend/logs/components/environment_logs_spec.js
+++ b/spec/frontend/logs/components/environment_logs_spec.js
@@ -1,5 +1,5 @@
import Vue from 'vue';
-import { GlDropdown, GlDropdownItem, GlSearchBoxByClick } from '@gitlab/ui';
+import { GlSprintf, GlDropdown, GlDropdownItem, GlSearchBoxByClick } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
import EnvironmentLogs from '~/logs/components/environment_logs.vue';
@@ -20,9 +20,18 @@ import {
jest.mock('~/lib/utils/scroll_utils');
+const module = 'environmentLogs';
+
+jest.mock('lodash/throttle', () =>
+ jest.fn(func => {
+ return func;
+ }),
+);
+
describe('EnvironmentLogs', () => {
let EnvironmentLogsComponent;
let store;
+ let dispatch;
let wrapper;
let state;
@@ -32,14 +41,6 @@ describe('EnvironmentLogs', () => {
clusterApplicationsDocumentationPath: mockDocumentationPath,
};
- const actionMocks = {
- setInitData: jest.fn(),
- setSearch: jest.fn(),
- showPodLogs: jest.fn(),
- showEnvironment: jest.fn(),
- fetchEnvironments: jest.fn(),
- };
-
const updateControlBtnsMock = jest.fn();
const findEnvironmentsDropdown = () => wrapper.find('.js-environments-dropdown');
@@ -47,24 +48,25 @@ describe('EnvironmentLogs', () => {
const findSearchBar = () => wrapper.find('.js-logs-search');
const findTimeRangePicker = () => wrapper.find({ ref: 'dateTimePicker' });
const findInfoAlert = () => wrapper.find('.js-elasticsearch-alert');
-
const findLogControlButtons = () => wrapper.find({ name: 'log-control-buttons-stub' });
+
+ const findInfiniteScroll = () => wrapper.find({ ref: 'infiniteScroll' });
const findLogTrace = () => wrapper.find('.js-log-trace');
+ const findLogFooter = () => wrapper.find({ ref: 'logFooter' });
+ const getInfiniteScrollAttr = attr => parseInt(findInfiniteScroll().attributes(attr), 10);
const mockSetInitData = () => {
state.pods.options = mockPods;
state.environments.current = mockEnvName;
[state.pods.current] = state.pods.options;
- state.logs.isComplete = false;
- state.logs.lines = mockLogsResult;
+ state.logs.lines = [];
};
- const mockShowPodLogs = podName => {
+ const mockShowPodLogs = () => {
state.pods.options = mockPods;
- [state.pods.current] = podName;
+ [state.pods.current] = mockPods;
- state.logs.isComplete = false;
state.logs.lines = mockLogsResult;
};
@@ -83,10 +85,21 @@ describe('EnvironmentLogs', () => {
methods: {
update: updateControlBtnsMock,
},
+ props: {
+ scrollDownButtonDisabled: false,
+ },
},
- },
- methods: {
- ...actionMocks,
+ GlInfiniteScroll: {
+ name: 'gl-infinite-scroll',
+ template: `
+ <div>
+ <slot name="header"></slot>
+ <slot name="items"></slot>
+ <slot></slot>
+ </div>
+ `,
+ },
+ GlSprintf,
},
});
};
@@ -95,12 +108,14 @@ describe('EnvironmentLogs', () => {
store = createStore();
state = store.state.environmentLogs;
EnvironmentLogsComponent = Vue.extend(EnvironmentLogs);
+
+ jest.spyOn(store, 'dispatch').mockResolvedValue();
+
+ dispatch = store.dispatch;
});
afterEach(() => {
- actionMocks.setInitData.mockReset();
- actionMocks.showPodLogs.mockReset();
- actionMocks.fetchEnvironments.mockReset();
+ store.dispatch.mockReset();
if (wrapper) {
wrapper.destroy();
@@ -124,14 +139,14 @@ describe('EnvironmentLogs', () => {
expect(findTimeRangePicker().is(DateTimePicker)).toBe(true);
// log trace
- expect(findLogTrace().isEmpty()).toBe(false);
+ expect(findInfiniteScroll().exists()).toBe(true);
+ expect(findLogTrace().exists()).toBe(true);
});
it('mounted inits data', () => {
initWrapper();
- expect(actionMocks.setInitData).toHaveBeenCalledTimes(1);
- expect(actionMocks.setInitData).toHaveBeenLastCalledWith({
+ expect(dispatch).toHaveBeenCalledWith(`${module}/setInitData`, {
timeRange: expect.objectContaining({
default: true,
}),
@@ -139,18 +154,15 @@ describe('EnvironmentLogs', () => {
podName: null,
});
- expect(actionMocks.fetchEnvironments).toHaveBeenCalledTimes(1);
- expect(actionMocks.fetchEnvironments).toHaveBeenLastCalledWith(mockEnvironmentsEndpoint);
+ expect(dispatch).toHaveBeenCalledWith(`${module}/fetchEnvironments`, mockEnvironmentsEndpoint);
});
describe('loading state', () => {
beforeEach(() => {
state.pods.options = [];
- state.logs = {
- lines: [],
- isLoading: true,
- };
+ state.logs.lines = [];
+ state.logs.isLoading = true;
state.environments = {
options: [],
@@ -183,6 +195,18 @@ describe('EnvironmentLogs', () => {
expect(updateControlBtnsMock).not.toHaveBeenCalled();
});
+ it('shows an infinite scroll with height and no content', () => {
+ expect(getInfiniteScrollAttr('max-list-height')).toBeGreaterThan(0);
+ expect(getInfiniteScrollAttr('fetched-items')).toBe(0);
+ });
+
+ it('shows an infinite scroll container with equal height and max-height ', () => {
+ const height = getInfiniteScrollAttr('max-list-height');
+
+ expect(height).toEqual(expect.any(Number));
+ expect(findInfiniteScroll().attributes('style')).toMatch(`height: ${height}px;`);
+ });
+
it('shows a logs trace', () => {
expect(findLogTrace().text()).toBe('');
expect(
@@ -193,14 +217,12 @@ describe('EnvironmentLogs', () => {
});
});
- describe('legacy environment', () => {
+ describe('k8s environment', () => {
beforeEach(() => {
state.pods.options = [];
- state.logs = {
- lines: [],
- isLoading: false,
- };
+ state.logs.lines = [];
+ state.logs.isLoading = false;
state.environments = {
options: mockEnvironments,
@@ -226,9 +248,16 @@ describe('EnvironmentLogs', () => {
describe('state with data', () => {
beforeEach(() => {
- actionMocks.setInitData.mockImplementation(mockSetInitData);
- actionMocks.showPodLogs.mockImplementation(mockShowPodLogs);
- actionMocks.fetchEnvironments.mockImplementation(mockFetchEnvs);
+ dispatch.mockImplementation(actionName => {
+ if (actionName === `${module}/setInitData`) {
+ mockSetInitData();
+ } else if (actionName === `${module}/showPodLogs`) {
+ mockShowPodLogs();
+ } else if (actionName === `${module}/fetchEnvironments`) {
+ mockFetchEnvs();
+ mockShowPodLogs();
+ }
+ });
initWrapper();
});
@@ -236,10 +265,6 @@ describe('EnvironmentLogs', () => {
afterEach(() => {
scrollDown.mockReset();
updateControlBtnsMock.mockReset();
-
- actionMocks.setInitData.mockReset();
- actionMocks.showPodLogs.mockReset();
- actionMocks.fetchEnvironments.mockReset();
});
it('displays an enabled search bar', () => {
@@ -249,8 +274,8 @@ describe('EnvironmentLogs', () => {
findSearchBar().vm.$emit('input', mockSearch);
findSearchBar().vm.$emit('submit');
- expect(actionMocks.setSearch).toHaveBeenCalledTimes(1);
- expect(actionMocks.setSearch).toHaveBeenCalledWith(mockSearch);
+ expect(dispatch).toHaveBeenCalledWith(`${module}/setInitData`, expect.any(Object));
+ expect(dispatch).toHaveBeenCalledWith(`${module}/setSearch`, mockSearch);
});
it('displays an enabled time window dropdown', () => {
@@ -282,18 +307,21 @@ describe('EnvironmentLogs', () => {
});
});
+ it('shows infinite scroll with height and no content', () => {
+ expect(getInfiniteScrollAttr('max-list-height')).toBeGreaterThan(0);
+ expect(getInfiniteScrollAttr('fetched-items')).toBe(mockTrace.length);
+ });
+
it('populates logs trace', () => {
const trace = findLogTrace();
expect(trace.text().split('\n').length).toBe(mockTrace.length);
expect(trace.text().split('\n')).toEqual(mockTrace);
});
- it('update control buttons state', () => {
- expect(updateControlBtnsMock).toHaveBeenCalledTimes(1);
- });
+ it('populates footer', () => {
+ const footer = findLogFooter().text();
- it('scrolls to bottom when loaded', () => {
- expect(scrollDown).toHaveBeenCalledTimes(1);
+ expect(footer).toContain(`${mockLogsResult.length} results`);
});
describe('when user clicks', () => {
@@ -301,33 +329,99 @@ describe('EnvironmentLogs', () => {
const items = findEnvironmentsDropdown().findAll(GlDropdownItem);
const index = 1; // any env
- expect(actionMocks.showEnvironment).toHaveBeenCalledTimes(0);
+ expect(dispatch).not.toHaveBeenCalledWith(`${module}/showEnvironment`, expect.anything());
items.at(index).vm.$emit('click');
- expect(actionMocks.showEnvironment).toHaveBeenCalledTimes(1);
- expect(actionMocks.showEnvironment).toHaveBeenLastCalledWith(mockEnvironments[index].name);
+ expect(dispatch).toHaveBeenCalledWith(
+ `${module}/showEnvironment`,
+ mockEnvironments[index].name,
+ );
});
it('pod name, trace is refreshed', () => {
const items = findPodsDropdown().findAll(GlDropdownItem);
const index = 2; // any pod
- expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(0);
+ expect(dispatch).not.toHaveBeenCalledWith(`${module}/showPodLogs`, expect.anything());
items.at(index).vm.$emit('click');
- expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(1);
- expect(actionMocks.showPodLogs).toHaveBeenLastCalledWith(mockPods[index]);
+ expect(dispatch).toHaveBeenCalledWith(`${module}/showPodLogs`, mockPods[index]);
});
it('refresh button, trace is refreshed', () => {
- expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(0);
+ expect(dispatch).not.toHaveBeenCalledWith(`${module}/showPodLogs`, expect.anything());
findLogControlButtons().vm.$emit('refresh');
- expect(actionMocks.showPodLogs).toHaveBeenCalledTimes(1);
- expect(actionMocks.showPodLogs).toHaveBeenLastCalledWith(mockPodName);
+ expect(dispatch).toHaveBeenCalledWith(`${module}/showPodLogs`, mockPodName);
+ });
+ });
+ });
+
+ describe('listeners', () => {
+ beforeEach(() => {
+ initWrapper();
+ });
+
+ it('attaches listeners in components', () => {
+ expect(findInfiniteScroll().vm.$listeners).toEqual({
+ topReached: expect.any(Function),
+ scroll: expect.any(Function),
+ });
+ });
+
+ it('`topReached` when not loading', () => {
+ expect(store.dispatch).not.toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
+
+ findInfiniteScroll().vm.$emit('topReached');
+
+ expect(store.dispatch).toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
+ });
+
+  it('`topReached` does not fetch more logs when already loading', () => {
+ state.logs.isLoading = true;
+ findInfiniteScroll().vm.$emit('topReached');
+
+ expect(store.dispatch).not.toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
+ });
+
+ it('`topReached` fetches more logs', () => {
+ state.logs.isLoading = true;
+ findInfiniteScroll().vm.$emit('topReached');
+
+ expect(store.dispatch).not.toHaveBeenCalledWith(`${module}/fetchMoreLogsPrepend`, undefined);
+ });
+
+ it('`scroll` on a scrollable target results in enabled scroll buttons', () => {
+ const target = { scrollTop: 10, clientHeight: 10, scrollHeight: 21 };
+
+ state.logs.isLoading = true;
+ findInfiniteScroll().vm.$emit('scroll', { target });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findLogControlButtons().props('scrollDownButtonDisabled')).toEqual(false);
+ });
+ });
+
+  it('`scroll` on a non-scrollable target results in disabled scroll buttons', () => {
+ const target = { scrollTop: 10, clientHeight: 10, scrollHeight: 20 };
+
+ state.logs.isLoading = true;
+ findInfiniteScroll().vm.$emit('scroll', { target });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findLogControlButtons().props('scrollDownButtonDisabled')).toEqual(true);
+ });
+ });
+
+ it('`scroll` on no target results in disabled scroll buttons', () => {
+ state.logs.isLoading = true;
+ findInfiniteScroll().vm.$emit('scroll', { target: undefined });
+
+ return wrapper.vm.$nextTick(() => {
+ expect(findLogControlButtons().props('scrollDownButtonDisabled')).toEqual(true);
});
});
});
diff --git a/spec/frontend/logs/components/log_control_buttons_spec.js b/spec/frontend/logs/components/log_control_buttons_spec.js
index f344e8189c3..38e568f569f 100644
--- a/spec/frontend/logs/components/log_control_buttons_spec.js
+++ b/spec/frontend/logs/components/log_control_buttons_spec.js
@@ -1,15 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { GlButton } from '@gitlab/ui';
import LogControlButtons from '~/logs/components/log_control_buttons.vue';
-import {
- canScroll,
- isScrolledToTop,
- isScrolledToBottom,
- scrollDown,
- scrollUp,
-} from '~/lib/utils/scroll_utils';
-
-jest.mock('~/lib/utils/scroll_utils');
describe('LogControlButtons', () => {
let wrapper;
@@ -18,8 +9,14 @@ describe('LogControlButtons', () => {
const findScrollToBottom = () => wrapper.find('.js-scroll-to-bottom');
const findRefreshBtn = () => wrapper.find('.js-refresh-log');
- const initWrapper = () => {
- wrapper = shallowMount(LogControlButtons);
+ const initWrapper = opts => {
+ wrapper = shallowMount(LogControlButtons, {
+ listeners: {
+ scrollUp: () => {},
+ scrollDown: () => {},
+ },
+ ...opts,
+ });
};
afterEach(() => {
@@ -55,27 +52,16 @@ describe('LogControlButtons', () => {
describe('when scrolling actions are enabled', () => {
beforeEach(() => {
// mock scrolled to the middle of a long page
- canScroll.mockReturnValue(true);
- isScrolledToBottom.mockReturnValue(false);
- isScrolledToTop.mockReturnValue(false);
-
initWrapper();
- wrapper.vm.update();
return wrapper.vm.$nextTick();
});
- afterEach(() => {
- canScroll.mockReset();
- isScrolledToTop.mockReset();
- isScrolledToBottom.mockReset();
- });
-
it('click on "scroll to top" scrolls up', () => {
expect(findScrollToTop().is('[disabled]')).toBe(false);
findScrollToTop().vm.$emit('click');
- expect(scrollUp).toHaveBeenCalledTimes(1);
+ expect(wrapper.emitted('scrollUp')).toHaveLength(1);
});
it('click on "scroll to bottom" scrolls down', () => {
@@ -83,25 +69,23 @@ describe('LogControlButtons', () => {
findScrollToBottom().vm.$emit('click');
- expect(scrollDown).toHaveBeenCalledTimes(1); // plus one time when trace was loaded
+ expect(wrapper.emitted('scrollDown')).toHaveLength(1);
});
});
describe('when scrolling actions are disabled', () => {
beforeEach(() => {
- // mock a short page without a scrollbar
- canScroll.mockReturnValue(false);
- isScrolledToBottom.mockReturnValue(true);
- isScrolledToTop.mockReturnValue(true);
-
- initWrapper();
+ initWrapper({ listeners: {} });
+ return wrapper.vm.$nextTick();
});
it('buttons are disabled', () => {
- wrapper.vm.update();
return wrapper.vm.$nextTick(() => {
- expect(findScrollToTop().is('[disabled]')).toBe(true);
- expect(findScrollToBottom().is('[disabled]')).toBe(true);
+ expect(findScrollToTop().exists()).toBe(false);
+ expect(findScrollToBottom().exists()).toBe(false);
+ // This should be enabled when gitlab-ui contains:
+ // https://gitlab.com/gitlab-org/gitlab-ui/-/merge_requests/1149
+ // expect(findScrollToBottom().is('[disabled]')).toBe(true);
});
});
});
diff --git a/spec/frontend/logs/mock_data.js b/spec/frontend/logs/mock_data.js
index 4c092a84b36..1a84d6edd12 100644
--- a/spec/frontend/logs/mock_data.js
+++ b/spec/frontend/logs/mock_data.js
@@ -1,14 +1,18 @@
-export const mockProjectPath = 'root/autodevops-deploy';
+const mockProjectPath = 'root/autodevops-deploy';
+
export const mockEnvName = 'production';
export const mockEnvironmentsEndpoint = `${mockProjectPath}/environments.json`;
export const mockEnvId = '99';
export const mockDocumentationPath = '/documentation.md';
+export const mockLogsEndpoint = '/dummy_logs_path.json';
+export const mockCursor = 'MOCK_CURSOR';
+export const mockNextCursor = 'MOCK_NEXT_CURSOR';
const makeMockEnvironment = (id, name, advancedQuerying) => ({
id,
project_path: mockProjectPath,
name,
- logs_api_path: '/dummy_logs_path.json',
+ logs_api_path: mockLogsEndpoint,
enable_advanced_logs_querying: advancedQuerying,
});
@@ -28,58 +32,22 @@ export const mockPods = [
];
export const mockLogsResult = [
- {
- timestamp: '2019-12-13T13:43:18.2760123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
- },
- { timestamp: '2019-12-13T13:43:18.2760123Z', message: '- -> /' },
- {
- timestamp: '2019-12-13T13:43:26.8420123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
- },
- { timestamp: '2019-12-13T13:43:26.8420123Z', message: '- -> /' },
- {
- timestamp: '2019-12-13T13:43:28.3710123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
- },
- { timestamp: '2019-12-13T13:43:28.3710123Z', message: '- -> /' },
- {
- timestamp: '2019-12-13T13:43:36.8860123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
- },
- { timestamp: '2019-12-13T13:43:36.8860123Z', message: '- -> /' },
- {
- timestamp: '2019-12-13T13:43:38.4000123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
- },
- { timestamp: '2019-12-13T13:43:38.4000123Z', message: '- -> /' },
- {
- timestamp: '2019-12-13T13:43:46.8420123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
- },
- { timestamp: '2019-12-13T13:43:46.8430123Z', message: '- -> /' },
- {
- timestamp: '2019-12-13T13:43:48.3240123Z',
- message: '10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
- },
- { timestamp: '2019-12-13T13:43:48.3250123Z', message: '- -> /' },
+ { timestamp: '2019-12-13T13:43:18.2760123Z', message: 'Log 1' },
+ { timestamp: '2019-12-13T13:43:18.2760123Z', message: 'Log 2' },
+ { timestamp: '2019-12-13T13:43:26.8420123Z', message: 'Log 3' },
];
export const mockTrace = [
- 'Dec 13 13:43:18.276Z | 10.36.0.1 - - [16/Oct/2019:06:29:48 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:18.276Z | - -> /',
- 'Dec 13 13:43:26.842Z | 10.36.0.1 - - [16/Oct/2019:06:29:57 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:26.842Z | - -> /',
- 'Dec 13 13:43:28.371Z | 10.36.0.1 - - [16/Oct/2019:06:29:58 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:28.371Z | - -> /',
- 'Dec 13 13:43:36.886Z | 10.36.0.1 - - [16/Oct/2019:06:30:07 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:36.886Z | - -> /',
- 'Dec 13 13:43:38.400Z | 10.36.0.1 - - [16/Oct/2019:06:30:08 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:38.400Z | - -> /',
- 'Dec 13 13:43:46.842Z | 10.36.0.1 - - [16/Oct/2019:06:30:17 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:46.843Z | - -> /',
- 'Dec 13 13:43:48.324Z | 10.36.0.1 - - [16/Oct/2019:06:30:18 UTC] "GET / HTTP/1.1" 200 13',
- 'Dec 13 13:43:48.325Z | - -> /',
+ 'Dec 13 13:43:18.276Z | Log 1',
+ 'Dec 13 13:43:18.276Z | Log 2',
+ 'Dec 13 13:43:26.842Z | Log 3',
];
+export const mockResponse = {
+ pod_name: mockPodName,
+ pods: mockPods,
+ logs: mockLogsResult,
+ cursor: mockNextCursor,
+};
+
export const mockSearch = 'foo +bar';
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
index 6309126159e..1512797e1bc 100644
--- a/spec/frontend/logs/stores/actions_spec.js
+++ b/spec/frontend/logs/stores/actions_spec.js
@@ -10,6 +10,7 @@ import {
showPodLogs,
fetchEnvironments,
fetchLogs,
+ fetchMoreLogsPrepend,
} from '~/logs/stores/actions';
import { defaultTimeRange } from '~/monitoring/constants';
@@ -18,7 +19,6 @@ import axios from '~/lib/utils/axios_utils';
import flash from '~/flash';
import {
- mockProjectPath,
mockPodName,
mockEnvironmentsEndpoint,
mockEnvironments,
@@ -26,6 +26,10 @@ import {
mockLogsResult,
mockEnvName,
mockSearch,
+ mockLogsEndpoint,
+ mockResponse,
+ mockCursor,
+ mockNextCursor,
} from '../mock_data';
jest.mock('~/flash');
@@ -52,6 +56,8 @@ describe('Logs Store actions', () => {
let state;
let mock;
+ const latestGetParams = () => mock.history.get[mock.history.get.length - 1].params;
+
convertToFixedRange.mockImplementation(range => {
if (range === defaultTimeRange) {
return { ...mockDefaultRange };
@@ -75,10 +81,16 @@ describe('Logs Store actions', () => {
describe('setInitData', () => {
it('should commit environment and pod name mutation', () =>
- testAction(setInitData, { environmentName: mockEnvName, podName: mockPodName }, state, [
- { type: types.SET_PROJECT_ENVIRONMENT, payload: mockEnvName },
- { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
- ]));
+ testAction(
+ setInitData,
+ { timeRange: mockFixedRange, environmentName: mockEnvName, podName: mockPodName },
+ state,
+ [
+ { type: types.SET_TIME_RANGE, payload: mockFixedRange },
+ { type: types.SET_PROJECT_ENVIRONMENT, payload: mockEnvName },
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ ],
+ ));
});
describe('setSearch', () => {
@@ -140,183 +152,245 @@ describe('Logs Store actions', () => {
});
});
- describe('fetchLogs', () => {
+  describe('when the backend responds successfully', () => {
+ let expectedMutations;
+ let expectedActions;
+
beforeEach(() => {
mock = new MockAdapter(axios);
+ mock.onGet(mockLogsEndpoint).reply(200, mockResponse);
+ mock.onGet(mockLogsEndpoint).replyOnce(202); // mock reactive cache
+
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
});
afterEach(() => {
mock.reset();
});
- it('should commit logs and pod data when there is pod name defined', () => {
- state.environments.options = mockEnvironments;
- state.environments.current = mockEnvName;
- state.pods.current = mockPodName;
-
- const endpoint = '/dummy_logs_path.json';
-
- mock
- .onGet(endpoint, {
- params: {
- pod_name: mockPodName,
- ...mockDefaultRange,
- },
- })
- .reply(200, {
- pod_name: mockPodName,
- pods: mockPods,
- logs: mockLogsResult,
- });
-
- mock.onGet(endpoint).replyOnce(202); // mock reactive cache
-
- return testAction(
- fetchLogs,
- null,
- state,
- [
+ describe('fetchLogs', () => {
+ beforeEach(() => {
+ expectedMutations = [
{ type: types.REQUEST_PODS_DATA },
{ type: types.REQUEST_LOGS_DATA },
{ type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
{ type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
- { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
- ],
- [],
- );
- });
+ {
+ type: types.RECEIVE_LOGS_DATA_SUCCESS,
+ payload: { logs: mockLogsResult, cursor: mockNextCursor },
+ },
+ ];
- it('should commit logs and pod data when there is pod name defined and a non-default date range', () => {
- state.projectPath = mockProjectPath;
- state.environments.options = mockEnvironments;
- state.environments.current = mockEnvName;
- state.pods.current = mockPodName;
- state.timeRange.current = mockFixedRange;
+ expectedActions = [];
+ });
- const endpoint = '/dummy_logs_path.json';
+ it('should commit logs and pod data when there is pod name defined', () => {
+ state.pods.current = mockPodName;
- mock
- .onGet(endpoint, {
- params: {
+ return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
+ expect(latestGetParams()).toMatchObject({
pod_name: mockPodName,
- start: mockFixedRange.start,
- end: mockFixedRange.end,
- },
- })
- .reply(200, {
- pod_name: mockPodName,
- pods: mockPods,
- logs: mockLogsResult,
+ });
});
+ });
- return testAction(
- fetchLogs,
- null,
- state,
- [
- { type: types.REQUEST_PODS_DATA },
- { type: types.REQUEST_LOGS_DATA },
- { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
- { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
- { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
- ],
- [],
- );
- });
-
- it('should commit logs and pod data when there is pod name and search and a faulty date range', () => {
- state.environments.options = mockEnvironments;
- state.environments.current = mockEnvName;
- state.pods.current = mockPodName;
- state.search = mockSearch;
- state.timeRange.current = 'INVALID_TIME_RANGE';
-
- const endpoint = '/dummy_logs_path.json';
+ it('should commit logs and pod data when there is pod name defined and a non-default date range', () => {
+ state.pods.current = mockPodName;
+ state.timeRange.current = mockFixedRange;
+ state.logs.cursor = mockCursor;
- mock
- .onGet(endpoint, {
- params: {
+ return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
+ expect(latestGetParams()).toEqual({
pod_name: mockPodName,
- search: mockSearch,
- },
- })
- .reply(200, {
- pod_name: mockPodName,
- pods: mockPods,
- logs: mockLogsResult,
+ start: mockFixedRange.start,
+ end: mockFixedRange.end,
+ cursor: mockCursor,
+ });
});
+ });
- mock.onGet(endpoint).replyOnce(202); // mock reactive cache
+ it('should commit logs and pod data when there is pod name and search and a faulty date range', () => {
+ state.pods.current = mockPodName;
+ state.search = mockSearch;
+ state.timeRange.current = 'INVALID_TIME_RANGE';
- return testAction(
- fetchLogs,
- null,
- state,
- [
- { type: types.REQUEST_PODS_DATA },
- { type: types.REQUEST_LOGS_DATA },
- { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
- { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
- { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
- ],
- [],
- () => {
+ return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
+ expect(latestGetParams()).toEqual({
+ pod_name: mockPodName,
+ search: mockSearch,
+ });
// Warning about time ranges was issued
expect(flash).toHaveBeenCalledTimes(1);
expect(flash).toHaveBeenCalledWith(expect.any(String), 'warning');
- },
- );
+ });
+ });
+
+ it('should commit logs and pod data when no pod name defined', () => {
+ state.timeRange.current = mockDefaultRange;
+
+ return testAction(fetchLogs, null, state, expectedMutations, expectedActions, () => {
+ expect(latestGetParams()).toEqual({});
+ });
+ });
});
- it('should commit logs and pod data when no pod name defined', done => {
- state.environments.options = mockEnvironments;
- state.environments.current = mockEnvName;
+ describe('fetchMoreLogsPrepend', () => {
+ beforeEach(() => {
+ expectedMutations = [
+ { type: types.REQUEST_LOGS_DATA_PREPEND },
+ {
+ type: types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS,
+ payload: { logs: mockLogsResult, cursor: mockNextCursor },
+ },
+ ];
- const endpoint = '/dummy_logs_path.json';
+ expectedActions = [];
+ });
- mock.onGet(endpoint, { params: { ...mockDefaultRange } }).reply(200, {
- pod_name: mockPodName,
- pods: mockPods,
- logs: mockLogsResult,
+ it('should commit logs and pod data when there is pod name defined', () => {
+ state.pods.current = mockPodName;
+
+ expectedActions = [];
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ expectedMutations,
+ expectedActions,
+ () => {
+ expect(latestGetParams()).toMatchObject({
+ pod_name: mockPodName,
+ });
+ },
+ );
});
- mock.onGet(endpoint).replyOnce(202); // mock reactive cache
- testAction(
+ it('should commit logs and pod data when there is pod name defined and a non-default date range', () => {
+ state.pods.current = mockPodName;
+ state.timeRange.current = mockFixedRange;
+ state.logs.cursor = mockCursor;
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ expectedMutations,
+ expectedActions,
+ () => {
+ expect(latestGetParams()).toEqual({
+ pod_name: mockPodName,
+ start: mockFixedRange.start,
+ end: mockFixedRange.end,
+ cursor: mockCursor,
+ });
+ },
+ );
+ });
+
+ it('should commit logs and pod data when there is pod name and search and a faulty date range', () => {
+ state.pods.current = mockPodName;
+ state.search = mockSearch;
+ state.timeRange.current = 'INVALID_TIME_RANGE';
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ expectedMutations,
+ expectedActions,
+ () => {
+ expect(latestGetParams()).toEqual({
+ pod_name: mockPodName,
+ search: mockSearch,
+ });
+ // Warning about time ranges was issued
+ expect(flash).toHaveBeenCalledTimes(1);
+ expect(flash).toHaveBeenCalledWith(expect.any(String), 'warning');
+ },
+ );
+ });
+
+ it('should commit logs and pod data when no pod name defined', () => {
+ state.timeRange.current = mockDefaultRange;
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ expectedMutations,
+ expectedActions,
+ () => {
+ expect(latestGetParams()).toEqual({});
+ },
+ );
+ });
+
+ it('should not commit logs or pod data when it has reached the end', () => {
+ state.logs.isComplete = true;
+ state.logs.cursor = null;
+
+ return testAction(
+ fetchMoreLogsPrepend,
+ null,
+ state,
+ [], // no mutations done
+ [], // no actions dispatched
+ () => {
+ expect(mock.history.get).toHaveLength(0);
+ },
+ );
+ });
+ });
+ });
+
+ describe('when the backend responds with an error', () => {
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ mock.onGet(mockLogsEndpoint).reply(500);
+ });
+
+ afterEach(() => {
+ mock.reset();
+ });
+
+ it('fetchLogs should commit logs and pod errors', () => {
+ state.environments.options = mockEnvironments;
+ state.environments.current = mockEnvName;
+
+ return testAction(
fetchLogs,
null,
state,
[
{ type: types.REQUEST_PODS_DATA },
{ type: types.REQUEST_LOGS_DATA },
- { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
- { type: types.RECEIVE_PODS_DATA_SUCCESS, payload: mockPods },
- { type: types.RECEIVE_LOGS_DATA_SUCCESS, payload: mockLogsResult },
+ { type: types.RECEIVE_PODS_DATA_ERROR },
+ { type: types.RECEIVE_LOGS_DATA_ERROR },
],
[],
- done,
+ () => {
+ expect(mock.history.get[0].url).toBe(mockLogsEndpoint);
+ },
);
});
- it('should commit logs and pod errors when backend fails', () => {
+ it('fetchMoreLogsPrepend should commit logs and pod errors', () => {
state.environments.options = mockEnvironments;
state.environments.current = mockEnvName;
- const endpoint = `/${mockProjectPath}/-/logs/elasticsearch.json?environment_name=${mockEnvName}`;
- mock.onGet(endpoint).replyOnce(500);
-
return testAction(
- fetchLogs,
+ fetchMoreLogsPrepend,
null,
state,
[
- { type: types.REQUEST_PODS_DATA },
- { type: types.REQUEST_LOGS_DATA },
- { type: types.RECEIVE_PODS_DATA_ERROR },
- { type: types.RECEIVE_LOGS_DATA_ERROR },
+ { type: types.REQUEST_LOGS_DATA_PREPEND },
+ { type: types.RECEIVE_LOGS_DATA_PREPEND_ERROR },
],
[],
() => {
- expect(flash).toHaveBeenCalledTimes(1);
+ expect(mock.history.get[0].url).toBe(mockLogsEndpoint);
},
);
});
diff --git a/spec/frontend/logs/stores/mutations_spec.js b/spec/frontend/logs/stores/mutations_spec.js
index dcb358c7d5b..eae838a31d4 100644
--- a/spec/frontend/logs/stores/mutations_spec.js
+++ b/spec/frontend/logs/stores/mutations_spec.js
@@ -9,6 +9,8 @@ import {
mockPodName,
mockLogsResult,
mockSearch,
+ mockCursor,
+ mockNextCursor,
} from '../mock_data';
describe('Logs Store Mutations', () => {
@@ -73,27 +75,47 @@ describe('Logs Store Mutations', () => {
it('starts loading for logs', () => {
mutations[types.REQUEST_LOGS_DATA](state);
- expect(state.logs).toEqual(
- expect.objectContaining({
- lines: [],
- isLoading: true,
- isComplete: false,
- }),
- );
+ expect(state.timeRange.current).toEqual({
+ start: expect.any(String),
+ end: expect.any(String),
+ });
+
+ expect(state.logs).toEqual({
+ lines: [],
+ cursor: null,
+ isLoading: true,
+ isComplete: false,
+ });
});
});
describe('RECEIVE_LOGS_DATA_SUCCESS', () => {
- it('receives logs lines', () => {
- mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, mockLogsResult);
+ it('receives logs lines and cursor', () => {
+ mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: mockCursor,
+ });
- expect(state.logs).toEqual(
- expect.objectContaining({
- lines: mockLogsResult,
- isLoading: false,
- isComplete: true,
- }),
- );
+ expect(state.logs).toEqual({
+ lines: mockLogsResult,
+ isLoading: false,
+ cursor: mockCursor,
+ isComplete: false,
+ });
+ });
+
+ it('receives logs lines and a null cursor to indicate the end', () => {
+ mutations[types.RECEIVE_LOGS_DATA_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: null,
+ });
+
+ expect(state.logs).toEqual({
+ lines: mockLogsResult,
+ isLoading: false,
+ cursor: null,
+ isComplete: true,
+ });
});
});
@@ -101,13 +123,77 @@ describe('Logs Store Mutations', () => {
it('receives log data error and stops loading', () => {
mutations[types.RECEIVE_LOGS_DATA_ERROR](state);
- expect(state.logs).toEqual(
- expect.objectContaining({
- lines: [],
- isLoading: false,
- isComplete: true,
- }),
- );
+ expect(state.logs).toEqual({
+ lines: [],
+ isLoading: false,
+ cursor: null,
+ isComplete: false,
+ });
+ });
+ });
+
+ describe('REQUEST_LOGS_DATA_PREPEND', () => {
+ it('receives logs lines and cursor', () => {
+ mutations[types.REQUEST_LOGS_DATA_PREPEND](state);
+
+ expect(state.logs.isLoading).toBe(true);
+ });
+ });
+
+ describe('RECEIVE_LOGS_DATA_PREPEND_SUCCESS', () => {
+ it('receives logs lines and cursor', () => {
+ mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: mockCursor,
+ });
+
+ expect(state.logs).toEqual({
+ lines: mockLogsResult,
+ isLoading: false,
+ cursor: mockCursor,
+ isComplete: false,
+ });
+ });
+
+ it('receives additional logs lines and a new cursor', () => {
+ mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: mockCursor,
+ });
+
+ mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: mockNextCursor,
+ });
+
+ expect(state.logs).toEqual({
+ lines: [...mockLogsResult, ...mockLogsResult],
+ isLoading: false,
+ cursor: mockNextCursor,
+ isComplete: false,
+ });
+ });
+
+ it('receives logs lines and a null cursor to indicate is complete', () => {
+ mutations[types.RECEIVE_LOGS_DATA_PREPEND_SUCCESS](state, {
+ logs: mockLogsResult,
+ cursor: null,
+ });
+
+ expect(state.logs).toEqual({
+ lines: mockLogsResult,
+ isLoading: false,
+ cursor: null,
+ isComplete: true,
+ });
+ });
+ });
+
+ describe('RECEIVE_LOGS_DATA_PREPEND_ERROR', () => {
+ it('receives logs lines and cursor', () => {
+ mutations[types.RECEIVE_LOGS_DATA_PREPEND_ERROR](state);
+
+ expect(state.logs.isLoading).toBe(false);
});
});
@@ -121,6 +207,7 @@ describe('Logs Store Mutations', () => {
describe('SET_TIME_RANGE', () => {
it('sets a default range', () => {
+ expect(state.timeRange.selected).toEqual(expect.any(Object));
expect(state.timeRange.current).toEqual(expect.any(Object));
});
@@ -131,12 +218,13 @@ describe('Logs Store Mutations', () => {
};
mutations[types.SET_TIME_RANGE](state, mockRange);
+ expect(state.timeRange.selected).toEqual(mockRange);
expect(state.timeRange.current).toEqual(mockRange);
});
});
describe('REQUEST_PODS_DATA', () => {
- it('receives log data error and stops loading', () => {
+ it('receives pods data', () => {
mutations[types.REQUEST_PODS_DATA](state);
expect(state.pods).toEqual(
diff --git a/spec/frontend/vue_shared/components/changed_file_icon_spec.js b/spec/frontend/vue_shared/components/changed_file_icon_spec.js
index 8258eb8204c..b77116be464 100644
--- a/spec/frontend/vue_shared/components/changed_file_icon_spec.js
+++ b/spec/frontend/vue_shared/components/changed_file_icon_spec.js
@@ -54,10 +54,10 @@ describe('Changed file icon', () => {
});
describe.each`
- file | iconName | tooltipText | desc
- ${changedFile()} | ${'file-modified'} | ${'Unstaged modification'} | ${'with file changed'}
- ${stagedFile()} | ${'file-modified-solid'} | ${'Staged modification'} | ${'with file staged'}
- ${newFile()} | ${'file-addition'} | ${'Unstaged addition'} | ${'with file new'}
+ file | iconName | tooltipText | desc
+ ${changedFile()} | ${'file-modified'} | ${'Modified'} | ${'with file changed'}
+ ${stagedFile()} | ${'file-modified-solid'} | ${'Modified'} | ${'with file staged'}
+ ${newFile()} | ${'file-addition'} | ${'Added'} | ${'with file new'}
`('$desc', ({ file, iconName, tooltipText }) => {
beforeEach(() => {
factory({ file });
diff --git a/spec/graphql/mutations/concerns/mutations/resolves_group_spec.rb b/spec/graphql/mutations/concerns/mutations/resolves_group_spec.rb
index fcc717f83a2..51d3c4f5d6b 100644
--- a/spec/graphql/mutations/concerns/mutations/resolves_group_spec.rb
+++ b/spec/graphql/mutations/concerns/mutations/resolves_group_spec.rb
@@ -11,12 +11,12 @@ describe Mutations::ResolvesGroup do
let(:context) { double }
- subject(:mutation) { mutation_class.new(object: nil, context: context) }
+ subject(:mutation) { mutation_class.new(object: nil, context: context, field: nil) }
it 'uses the GroupsResolver to resolve groups by path' do
group = create(:group)
- expect(Resolvers::GroupResolver).to receive(:new).with(object: nil, context: context).and_call_original
+ expect(Resolvers::GroupResolver).to receive(:new).with(object: nil, context: context, field: nil).and_call_original
expect(mutation.resolve_group(full_path: group.full_path).sync).to eq(group)
end
end
diff --git a/spec/graphql/mutations/concerns/mutations/resolves_issuable_spec.rb b/spec/graphql/mutations/concerns/mutations/resolves_issuable_spec.rb
index 064ad90f707..69db8d016d7 100644
--- a/spec/graphql/mutations/concerns/mutations/resolves_issuable_spec.rb
+++ b/spec/graphql/mutations/concerns/mutations/resolves_issuable_spec.rb
@@ -12,7 +12,7 @@ describe Mutations::ResolvesIssuable do
let(:project) { create(:project) }
let(:user) { create(:user) }
let(:context) { { current_user: user } }
- let(:mutation) { mutation_class.new(object: nil, context: context) }
+ let(:mutation) { mutation_class.new(object: nil, context: context, field: nil) }
shared_examples 'resolving an issuable' do |type|
context 'when user has access' do
@@ -39,7 +39,7 @@ describe Mutations::ResolvesIssuable do
.and_return(resolved_project)
expect(resolver_class).to receive(:new)
- .with(object: resolved_project, context: context)
+ .with(object: resolved_project, context: context, field: nil)
.and_call_original
subject
@@ -47,7 +47,7 @@ describe Mutations::ResolvesIssuable do
it 'uses the ResolvesProject to resolve project' do
expect(Resolvers::ProjectResolver).to receive(:new)
- .with(object: nil, context: context)
+ .with(object: nil, context: context, field: nil)
.and_call_original
subject
diff --git a/spec/graphql/mutations/concerns/mutations/resolves_project_spec.rb b/spec/graphql/mutations/concerns/mutations/resolves_project_spec.rb
index 918e5fb016e..b5c349f6284 100644
--- a/spec/graphql/mutations/concerns/mutations/resolves_project_spec.rb
+++ b/spec/graphql/mutations/concerns/mutations/resolves_project_spec.rb
@@ -11,12 +11,12 @@ describe Mutations::ResolvesProject do
let(:context) { double }
- subject(:mutation) { mutation_class.new(object: nil, context: context) }
+ subject(:mutation) { mutation_class.new(object: nil, context: context, field: nil) }
it 'uses the ProjectsResolver to resolve projects by path' do
project = create(:project)
- expect(Resolvers::ProjectResolver).to receive(:new).with(object: nil, context: context).and_call_original
+ expect(Resolvers::ProjectResolver).to receive(:new).with(object: nil, context: context, field: nil).and_call_original
expect(mutation.resolve_project(full_path: project.full_path).sync).to eq(project)
end
end
diff --git a/spec/graphql/mutations/issues/set_confidential_spec.rb b/spec/graphql/mutations/issues/set_confidential_spec.rb
index a8f1fcdf7f1..6031953c869 100644
--- a/spec/graphql/mutations/issues/set_confidential_spec.rb
+++ b/spec/graphql/mutations/issues/set_confidential_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::Issues::SetConfidential do
let(:issue) { create(:issue) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:confidential) { true }
diff --git a/spec/graphql/mutations/issues/set_due_date_spec.rb b/spec/graphql/mutations/issues/set_due_date_spec.rb
index b45a7b460cd..73ba11fc551 100644
--- a/spec/graphql/mutations/issues/set_due_date_spec.rb
+++ b/spec/graphql/mutations/issues/set_due_date_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::Issues::SetDueDate do
let(:issue) { create(:issue) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:due_date) { 2.days.since }
diff --git a/spec/graphql/mutations/issues/update_spec.rb b/spec/graphql/mutations/issues/update_spec.rb
index 3d671680ccf..83bd3041cbf 100644
--- a/spec/graphql/mutations/issues/update_spec.rb
+++ b/spec/graphql/mutations/issues/update_spec.rb
@@ -13,7 +13,7 @@ describe Mutations::Issues::Update do
due_date: Date.tomorrow
}
end
- let(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
let(:mutated_issue) { subject[:issue] }
describe '#resolve' do
diff --git a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
index 2033ab57a0d..d88c5db05c9 100644
--- a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::MergeRequests::SetAssignees do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:assignee) { create(:user) }
diff --git a/spec/graphql/mutations/merge_requests/set_labels_spec.rb b/spec/graphql/mutations/merge_requests/set_labels_spec.rb
index f7c04a57f68..f58f35eb6f3 100644
--- a/spec/graphql/mutations/merge_requests/set_labels_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_labels_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::MergeRequests::SetLabels do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:label) { create(:label, project: merge_request.project) }
diff --git a/spec/graphql/mutations/merge_requests/set_locked_spec.rb b/spec/graphql/mutations/merge_requests/set_locked_spec.rb
index d35430abff1..12ae1314f22 100644
--- a/spec/graphql/mutations/merge_requests/set_locked_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_locked_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::MergeRequests::SetLocked do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:locked) { true }
diff --git a/spec/graphql/mutations/merge_requests/set_milestone_spec.rb b/spec/graphql/mutations/merge_requests/set_milestone_spec.rb
index d79b0a995d7..ad7f2df0842 100644
--- a/spec/graphql/mutations/merge_requests/set_milestone_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_milestone_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::MergeRequests::SetMilestone do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:milestone) { create(:milestone, project: merge_request.project) }
diff --git a/spec/graphql/mutations/merge_requests/set_subscription_spec.rb b/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
index 286de6c0c97..a28bab363f3 100644
--- a/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_subscription_spec.rb
@@ -7,7 +7,7 @@ describe Mutations::MergeRequests::SetSubscription do
let(:project) { merge_request.project }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:subscribe) { true }
diff --git a/spec/graphql/mutations/merge_requests/set_wip_spec.rb b/spec/graphql/mutations/merge_requests/set_wip_spec.rb
index 490994c4577..9f0adcf117a 100644
--- a/spec/graphql/mutations/merge_requests/set_wip_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_wip_spec.rb
@@ -6,7 +6,7 @@ describe Mutations::MergeRequests::SetWip do
let(:merge_request) { create(:merge_request) }
let(:user) { create(:user) }
- subject(:mutation) { described_class.new(object: nil, context: { current_user: user }) }
+ subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
let(:wip) { true }
diff --git a/spec/graphql/mutations/todos/mark_all_done_spec.rb b/spec/graphql/mutations/todos/mark_all_done_spec.rb
index cce69d0dcdc..98b22a3e761 100644
--- a/spec/graphql/mutations/todos/mark_all_done_spec.rb
+++ b/spec/graphql/mutations/todos/mark_all_done_spec.rb
@@ -48,6 +48,6 @@ describe Mutations::Todos::MarkAllDone do
end
def mutation_for(user)
- described_class.new(object: nil, context: { current_user: user })
+ described_class.new(object: nil, context: { current_user: user }, field: nil)
end
end
diff --git a/spec/graphql/mutations/todos/mark_done_spec.rb b/spec/graphql/mutations/todos/mark_done_spec.rb
index ff61ef76db6..059ef3c8eee 100644
--- a/spec/graphql/mutations/todos/mark_done_spec.rb
+++ b/spec/graphql/mutations/todos/mark_done_spec.rb
@@ -14,7 +14,7 @@ describe Mutations::Todos::MarkDone do
let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :pending) }
- let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }) }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
describe '#resolve' do
it 'marks a single todo as done' do
diff --git a/spec/graphql/mutations/todos/restore_many_spec.rb b/spec/graphql/mutations/todos/restore_many_spec.rb
index 7821ce35a08..8f4a8985f9e 100644
--- a/spec/graphql/mutations/todos/restore_many_spec.rb
+++ b/spec/graphql/mutations/todos/restore_many_spec.rb
@@ -12,7 +12,7 @@ describe Mutations::Todos::RestoreMany do
let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :done) }
- let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }) }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
describe '#resolve' do
it 'restores a single todo' do
diff --git a/spec/graphql/mutations/todos/restore_spec.rb b/spec/graphql/mutations/todos/restore_spec.rb
index 76a2d4ffffd..1637acc2fb5 100644
--- a/spec/graphql/mutations/todos/restore_spec.rb
+++ b/spec/graphql/mutations/todos/restore_spec.rb
@@ -12,7 +12,7 @@ describe Mutations::Todos::Restore do
let_it_be(:other_user_todo) { create(:todo, user: other_user, author: author, state: :done) }
- let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }) }
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
describe '#resolve' do
it 'restores a single todo' do
diff --git a/spec/helpers/releases_helper_spec.rb b/spec/helpers/releases_helper_spec.rb
index 3f56c189642..d9d6a324f09 100644
--- a/spec/helpers/releases_helper_spec.rb
+++ b/spec/helpers/releases_helper_spec.rb
@@ -18,16 +18,31 @@ describe ReleasesHelper do
context 'url helpers' do
let(:project) { build(:project, namespace: create(:group)) }
let(:release) { create(:release, project: project) }
+ let(:user) { create(:user) }
+ let(:can_user_create_release) { false }
+ let(:common_keys) { [:project_id, :illustration_path, :documentation_path] }
before do
helper.instance_variable_set(:@project, project)
helper.instance_variable_set(:@release, release)
+ allow(helper).to receive(:current_user).and_return(user)
+ allow(helper).to receive(:can?)
+ .with(user, :create_release, project)
+ .and_return(can_user_create_release)
end
describe '#data_for_releases_page' do
- it 'has the needed data to display release blocks' do
- keys = %i(project_id illustration_path documentation_path)
- expect(helper.data_for_releases_page.keys).to eq(keys)
+ it 'includes the required data for displaying release blocks' do
+ expect(helper.data_for_releases_page.keys).to contain_exactly(*common_keys)
+ end
+
+ context 'when the user is allowed to create a new release' do
+ let(:can_user_create_release) { true }
+
+ it 'includes new_release_path' do
+ expect(helper.data_for_releases_page.keys).to contain_exactly(*common_keys, :new_release_path)
+ expect(helper.data_for_releases_page[:new_release_path]).to eq(new_project_tag_path(project))
+ end
end
end
diff --git a/spec/javascripts/ide/components/commit_sidebar/form_spec.js b/spec/javascripts/ide/components/commit_sidebar/form_spec.js
index 5cb804938ed..f5d1a9de59c 100644
--- a/spec/javascripts/ide/components/commit_sidebar/form_spec.js
+++ b/spec/javascripts/ide/components/commit_sidebar/form_spec.js
@@ -52,7 +52,7 @@ describe('IDE commit form', () => {
vm.$store.state.stagedFiles.push('test');
vm.$nextTick(() => {
- expect(vm.$el.querySelector('p').textContent).toContain('1 staged and 1 unstaged changes');
+ expect(vm.$el.querySelector('p').textContent).toContain('1 changed file');
done();
});
});
diff --git a/spec/javascripts/ide/components/file_row_extra_spec.js b/spec/javascripts/ide/components/file_row_extra_spec.js
index f498d8251c8..9fd014b50ef 100644
--- a/spec/javascripts/ide/components/file_row_extra_spec.js
+++ b/spec/javascripts/ide/components/file_row_extra_spec.js
@@ -41,30 +41,20 @@ describe('IDE extra file row component', () => {
describe('folderChangesTooltip', () => {
it('returns undefined when changes count is 0', () => {
- expect(vm.folderChangesTooltip).toBe(undefined);
- });
-
- it('returns unstaged changes text', () => {
- changesCount = 1;
- unstagedFilesCount = 1;
-
- expect(vm.folderChangesTooltip).toBe('1 unstaged change');
- });
+ changesCount = 0;
- it('returns staged changes text', () => {
- changesCount = 1;
- stagedFilesCount = 1;
-
- expect(vm.folderChangesTooltip).toBe('1 staged change');
+ expect(vm.folderChangesTooltip).toBe(undefined);
});
- it('returns staged and unstaged changes text', () => {
- changesCount = 1;
- stagedFilesCount = 1;
- unstagedFilesCount = 1;
+ [{ input: 1, output: '1 changed file' }, { input: 2, output: '2 changed files' }].forEach(
+ ({ input, output }) => {
+ it('returns changed files count if changes count is not 0', () => {
+ changesCount = input;
- expect(vm.folderChangesTooltip).toBe('1 staged and 1 unstaged changes');
- });
+ expect(vm.folderChangesTooltip).toBe(output);
+ });
+ },
+ );
});
describe('show tree changes count', () => {
diff --git a/spec/javascripts/ide/components/repo_commit_section_spec.js b/spec/javascripts/ide/components/repo_commit_section_spec.js
index 917eb1438bd..0ba8c86a036 100644
--- a/spec/javascripts/ide/components/repo_commit_section_spec.js
+++ b/spec/javascripts/ide/components/repo_commit_section_spec.js
@@ -30,19 +30,13 @@ describe('RepoCommitSection', () => {
const files = [file('file1'), file('file2')].map(f =>
Object.assign(f, {
type: 'blob',
+ content: 'original content',
}),
);
vm.$store.state.rightPanelCollapsed = false;
vm.$store.state.currentBranch = 'master';
- vm.$store.state.changedFiles = [...files];
- vm.$store.state.changedFiles.forEach(f =>
- Object.assign(f, {
- changed: true,
- content: 'changedFile testing',
- }),
- );
-
+ vm.$store.state.changedFiles = [];
vm.$store.state.stagedFiles = [{ ...files[0] }, { ...files[1] }];
vm.$store.state.stagedFiles.forEach(f =>
Object.assign(f, {
@@ -51,7 +45,7 @@ describe('RepoCommitSection', () => {
}),
);
- vm.$store.state.changedFiles.forEach(f => {
+ files.forEach(f => {
vm.$store.state.entries[f.path] = f;
});
@@ -96,7 +90,7 @@ describe('RepoCommitSection', () => {
const changedFileElements = [...vm.$el.querySelectorAll('.multi-file-commit-list > li')];
const allFiles = vm.$store.state.changedFiles.concat(vm.$store.state.stagedFiles);
- expect(changedFileElements.length).toEqual(4);
+ expect(changedFileElements).toHaveLength(2);
changedFileElements.forEach((changedFile, i) => {
expect(changedFile.textContent.trim()).toContain(allFiles[i].path);
diff --git a/spec/javascripts/releases/components/app_index_spec.js b/spec/javascripts/releases/components/app_index_spec.js
index 83b0652d59b..020937d07e5 100644
--- a/spec/javascripts/releases/components/app_index_spec.js
+++ b/spec/javascripts/releases/components/app_index_spec.js
@@ -13,6 +13,7 @@ import {
releases,
} from '../mock_data';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import waitForPromises from 'spec/helpers/wait_for_promises';
describe('Releases App ', () => {
const Component = Vue.extend(app);
@@ -22,7 +23,7 @@ describe('Releases App ', () => {
const props = {
projectId: 'gitlab-ce',
- documentationLink: 'help/releases',
+ documentationPath: 'help/releases',
illustrationPath: 'illustration/path',
};
@@ -51,9 +52,9 @@ describe('Releases App ', () => {
expect(vm.$el.querySelector('.js-success-state')).toBeNull();
expect(vm.$el.querySelector('.gl-pagination')).toBeNull();
- setTimeout(() => {
- done();
- }, 0);
+ waitForPromises()
+ .then(done)
+ .catch(done.fail);
});
});
@@ -66,14 +67,16 @@ describe('Releases App ', () => {
});
it('renders success state', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-loading')).toBeNull();
- expect(vm.$el.querySelector('.js-empty-state')).toBeNull();
- expect(vm.$el.querySelector('.js-success-state')).not.toBeNull();
- expect(vm.$el.querySelector('.gl-pagination')).toBeNull();
-
- done();
- }, 0);
+ waitForPromises()
+ .then(() => {
+ expect(vm.$el.querySelector('.js-loading')).toBeNull();
+ expect(vm.$el.querySelector('.js-empty-state')).toBeNull();
+ expect(vm.$el.querySelector('.js-success-state')).not.toBeNull();
+ expect(vm.$el.querySelector('.gl-pagination')).toBeNull();
+
+ done();
+ })
+ .catch(done.fail);
});
});
@@ -86,14 +89,16 @@ describe('Releases App ', () => {
});
it('renders success state', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-loading')).toBeNull();
- expect(vm.$el.querySelector('.js-empty-state')).toBeNull();
- expect(vm.$el.querySelector('.js-success-state')).not.toBeNull();
- expect(vm.$el.querySelector('.gl-pagination')).not.toBeNull();
-
- done();
- }, 0);
+ waitForPromises()
+ .then(() => {
+ expect(vm.$el.querySelector('.js-loading')).toBeNull();
+ expect(vm.$el.querySelector('.js-empty-state')).toBeNull();
+ expect(vm.$el.querySelector('.js-success-state')).not.toBeNull();
+ expect(vm.$el.querySelector('.gl-pagination')).not.toBeNull();
+
+ done();
+ })
+ .catch(done.fail);
});
});
@@ -104,14 +109,76 @@ describe('Releases App ', () => {
});
it('renders empty state', done => {
- setTimeout(() => {
- expect(vm.$el.querySelector('.js-loading')).toBeNull();
- expect(vm.$el.querySelector('.js-empty-state')).not.toBeNull();
- expect(vm.$el.querySelector('.js-success-state')).toBeNull();
- expect(vm.$el.querySelector('.gl-pagination')).toBeNull();
-
- done();
- }, 0);
+ waitForPromises()
+ .then(() => {
+ expect(vm.$el.querySelector('.js-loading')).toBeNull();
+ expect(vm.$el.querySelector('.js-empty-state')).not.toBeNull();
+ expect(vm.$el.querySelector('.js-success-state')).toBeNull();
+ expect(vm.$el.querySelector('.gl-pagination')).toBeNull();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('"New release" button', () => {
+ const findNewReleaseButton = () => vm.$el.querySelector('.js-new-release-btn');
+
+ beforeEach(() => {
+ spyOn(api, 'releases').and.returnValue(Promise.resolve({ data: [], headers: {} }));
+ });
+
+ const factory = additionalProps => {
+ vm = mountComponentWithStore(Component, {
+ props: {
+ ...props,
+ ...additionalProps,
+ },
+ store,
+ });
+ };
+
+ describe('when the user is allowed to create a new Release', () => {
+ const newReleasePath = 'path/to/new/release';
+
+ beforeEach(() => {
+ factory({ newReleasePath });
+ });
+
+ it('renders the "New release" button', done => {
+ waitForPromises()
+ .then(() => {
+ expect(findNewReleaseButton()).not.toBeNull();
+
+ done();
+ })
+ .catch(done.fail);
+ });
+
+ it('renders the "New release" button with the correct href', done => {
+ waitForPromises()
+ .then(() => {
+ expect(findNewReleaseButton().getAttribute('href')).toBe(newReleasePath);
+
+ done();
+ })
+ .catch(done.fail);
+ });
+ });
+
+ describe('when the user is not allowed to create a new Release', () => {
+ beforeEach(() => factory());
+
+ it('does not render the "New release" button', done => {
+ waitForPromises()
+ .then(() => {
+ expect(findNewReleaseButton()).toBeNull();
+
+ done();
+ })
+ .catch(done.fail);
+ });
});
});
});
diff --git a/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb b/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb
new file mode 100644
index 00000000000..5700cac2e0f
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb
@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::LinkLfsObjectsProjects, :migration, schema: 2020_03_10_075115 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:fork_networks) { table(:fork_networks) }
+ let(:fork_network_members) { table(:fork_network_members) }
+ let(:lfs_objects) { table(:lfs_objects) }
+ let(:lfs_objects_projects) { table(:lfs_objects_projects) }
+
+ let(:namespace) { namespaces.create(name: 'GitLab', path: 'gitlab') }
+
+ let(:fork_network) { fork_networks.create(root_project_id: source_project.id) }
+ let(:another_fork_network) { fork_networks.create(root_project_id: another_source_project.id) }
+
+ let(:source_project) { projects.create(namespace_id: namespace.id) }
+ let(:another_source_project) { projects.create(namespace_id: namespace.id) }
+ let(:project) { projects.create(namespace_id: namespace.id) }
+ let(:another_project) { projects.create(namespace_id: namespace.id) }
+ let(:partially_linked_project) { projects.create(namespace_id: namespace.id) }
+ let(:fully_linked_project) { projects.create(namespace_id: namespace.id) }
+
+ let(:lfs_object) { lfs_objects.create(oid: 'abc123', size: 100) }
+ let(:another_lfs_object) { lfs_objects.create(oid: 'def456', size: 200) }
+
+ let!(:source_project_lop_1) do
+ lfs_objects_projects.create(
+ lfs_object_id: lfs_object.id,
+ project_id: source_project.id
+ )
+ end
+
+ let!(:source_project_lop_2) do
+ lfs_objects_projects.create(
+ lfs_object_id: another_lfs_object.id,
+ project_id: source_project.id
+ )
+ end
+
+ let!(:another_source_project_lop_1) do
+ lfs_objects_projects.create(
+ lfs_object_id: lfs_object.id,
+ project_id: another_source_project.id
+ )
+ end
+
+ let!(:another_source_project_lop_2) do
+ lfs_objects_projects.create(
+ lfs_object_id: another_lfs_object.id,
+ project_id: another_source_project.id
+ )
+ end
+
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ # Create links between projects
+ fork_network_members.create(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil)
+
+ [project, partially_linked_project, fully_linked_project].each do |p|
+ fork_network_members.create(
+ fork_network_id: fork_network.id,
+ project_id: p.id,
+ forked_from_project_id: fork_network.root_project_id
+ )
+ end
+
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil)
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_project.id, forked_from_project_id: another_fork_network.root_project_id)
+
+ # Links LFS objects to some projects
+ lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: fully_linked_project.id)
+ lfs_objects_projects.create(lfs_object_id: another_lfs_object.id, project_id: fully_linked_project.id)
+ lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: partially_linked_project.id)
+ end
+
+ context 'when there are LFS objects to be linked' do
+ it 'creates LfsObjectsProject records for forks based on the specified range of LfsObjectProject id' do
+ expect_next_instance_of(Gitlab::BackgroundMigration::Logger) do |logger|
+ expect(logger).to receive(:info).exactly(4).times
+ end
+
+ expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }.to change { lfs_objects_projects.count }.by(5)
+
+ expect(lfs_object_ids_for(project)).to match_array(lfs_object_ids_for(source_project))
+ expect(lfs_object_ids_for(another_project)).to match_array(lfs_object_ids_for(another_source_project))
+ expect(lfs_object_ids_for(partially_linked_project)).to match_array(lfs_object_ids_for(source_project))
+
+ expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }.not_to change { lfs_objects_projects.count }
+ end
+ end
+
+ context 'when there are no LFS objects to be linked' do
+ before do
+ # Links LFS objects to all projects
+ projects.all.each do |p|
+ lfs_objects_projects.create(lfs_object_id: lfs_object.id, project_id: p.id)
+ lfs_objects_projects.create(lfs_object_id: another_lfs_object.id, project_id: p.id)
+ end
+ end
+
+ it 'does not create LfsObjectProject records' do
+ expect { subject.perform(source_project_lop_1.id, another_source_project_lop_2.id) }
+ .not_to change { lfs_objects_projects.count }
+ end
+ end
+
+ def lfs_object_ids_for(project)
+ lfs_objects_projects.where(project_id: project.id).pluck(:lfs_object_id)
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_queue_spec.rb b/spec/lib/gitlab/sidekiq_queue_spec.rb
index 9516ea10511..f5be8d9bfed 100644
--- a/spec/lib/gitlab/sidekiq_queue_spec.rb
+++ b/spec/lib/gitlab/sidekiq_queue_spec.rb
@@ -2,18 +2,18 @@
require 'spec_helper'
-describe Gitlab::SidekiqQueue do
+describe Gitlab::SidekiqQueue, :clean_gitlab_redis_queues do
around do |example|
Sidekiq::Queue.new('authorized_projects').clear
Sidekiq::Testing.disable!(&example)
Sidekiq::Queue.new('authorized_projects').clear
end
- def add_job(user)
+ def add_job(user, args)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
- 'args' => [user.id],
+ 'args' => args,
'meta.user' => user.username
)
end
@@ -24,9 +24,9 @@ describe Gitlab::SidekiqQueue do
let_it_be(:sidekiq_queue_user) { create(:user) }
before do
- add_job(create(:user))
- add_job(sidekiq_queue_user)
- add_job(sidekiq_queue_user)
+ add_job(create(:user), [1])
+ add_job(sidekiq_queue_user, [2])
+ add_job(sidekiq_queue_user, [3])
end
context 'when the queue is not processed in time' do
@@ -70,7 +70,7 @@ describe Gitlab::SidekiqQueue do
context 'when there are no valid metadata keys passed' do
it 'raises NoMetadataError' do
- add_job(create(:user))
+ add_job(create(:user), [1])
expect { described_class.new('authorized_projects').drop_jobs!({ username: 'sidekiq_queue_user' }, timeout: 1) }
.to raise_error(described_class::NoMetadataError)
diff --git a/spec/migrations/schedule_link_lfs_objects_projects_spec.rb b/spec/migrations/schedule_link_lfs_objects_projects_spec.rb
new file mode 100644
index 00000000000..055ab3cdd83
--- /dev/null
+++ b/spec/migrations/schedule_link_lfs_objects_projects_spec.rb
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200310075115_schedule_link_lfs_objects_projects.rb')
+
+describe ScheduleLinkLfsObjectsProjects, :migration, :sidekiq do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:fork_networks) { table(:fork_networks) }
+ let(:fork_network_members) { table(:fork_network_members) }
+ let(:lfs_objects) { table(:lfs_objects) }
+ let(:lfs_objects_projects) { table(:lfs_objects_projects) }
+
+ let(:namespace) { namespaces.create(name: 'GitLab', path: 'gitlab') }
+
+ let(:fork_network) { fork_networks.create(root_project_id: source_project.id) }
+ let(:another_fork_network) { fork_networks.create(root_project_id: another_source_project.id) }
+
+ let(:source_project) { projects.create(namespace_id: namespace.id) }
+ let(:another_source_project) { projects.create(namespace_id: namespace.id) }
+ let(:project) { projects.create(namespace_id: namespace.id) }
+ let(:another_project) { projects.create(namespace_id: namespace.id) }
+
+ let(:lfs_object) { lfs_objects.create(oid: 'abc123', size: 100) }
+ let(:another_lfs_object) { lfs_objects.create(oid: 'def456', size: 200) }
+
+ let!(:source_project_lop_1) do
+ lfs_objects_projects.create(
+ lfs_object_id: lfs_object.id,
+ project_id: source_project.id
+ )
+ end
+
+ let!(:source_project_lop_2) do
+ lfs_objects_projects.create(
+ lfs_object_id: another_lfs_object.id,
+ project_id: source_project.id
+ )
+ end
+
+ let!(:another_source_project_lop_1) do
+ lfs_objects_projects.create(
+ lfs_object_id: lfs_object.id,
+ project_id: another_source_project.id
+ )
+ end
+
+ let!(:another_source_project_lop_2) do
+ lfs_objects_projects.create(
+ lfs_object_id: another_lfs_object.id,
+ project_id: another_source_project.id
+ )
+ end
+
+ before do
+ stub_const("#{described_class.name}::BATCH_SIZE", 2)
+
+ # Create links between projects
+ fork_network_members.create(fork_network_id: fork_network.id, project_id: source_project.id, forked_from_project_id: nil)
+ fork_network_members.create(fork_network_id: fork_network.id, project_id: project.id, forked_from_project_id: source_project.id)
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_source_project.id, forked_from_project_id: nil)
+ fork_network_members.create(fork_network_id: another_fork_network.id, project_id: another_project.id, forked_from_project_id: another_fork_network.root_project_id)
+ end
+
+ it 'schedules background migration to link LFS objects' do
+ Sidekiq::Testing.fake! do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(2.minutes, source_project_lop_1.id, source_project_lop_2.id)
+ expect(described_class::MIGRATION)
+ .to be_scheduled_delayed_migration(4.minutes, another_source_project_lop_1.id, another_source_project_lop_2.id)
+ end
+ end
+end
diff --git a/spec/models/concerns/bulk_insert_safe_spec.rb b/spec/models/concerns/bulk_insert_safe_spec.rb
index 9ebaedcf252..4969327132a 100644
--- a/spec/models/concerns/bulk_insert_safe_spec.rb
+++ b/spec/models/concerns/bulk_insert_safe_spec.rb
@@ -6,7 +6,19 @@ describe BulkInsertSafe do
class BulkInsertItem < ApplicationRecord
include BulkInsertSafe
- validates :name, presence: true
+ validates :name, :enum_value, :secret_value, presence: true
+
+ ENUM_VALUES = {
+ case_1: 1
+ }.freeze
+
+ enum enum_value: ENUM_VALUES
+
+ attr_encrypted :secret_value,
+ mode: :per_attribute_iv,
+ algorithm: 'aes-256-gcm',
+ key: Settings.attr_encrypted_db_key_base_32,
+ insecure_mode: false
end
module InheritedUnsafeMethods
@@ -29,8 +41,13 @@ describe BulkInsertSafe do
ActiveRecord::Schema.define do
create_table :bulk_insert_items, force: true do |t|
t.string :name, null: true
+ t.integer :enum_value, null: false
+ t.text :encrypted_secret_value, null: false
+ t.string :encrypted_secret_value_iv, null: false
end
end
+
+ BulkInsertItem.reset_column_information
end
after(:all) do
@@ -41,13 +58,21 @@ describe BulkInsertSafe do
def build_valid_items_for_bulk_insertion
Array.new(10) do |n|
- BulkInsertItem.new(name: "item-#{n}")
+ BulkInsertItem.new(
+ name: "item-#{n}",
+ enum_value: 'case_1',
+ secret_value: "my-secret"
+ )
end
end
def build_invalid_items_for_bulk_insertion
Array.new(10) do
- BulkInsertItem.new # requires `name` to be set
+ BulkInsertItem.new(
+ name: nil, # requires `name` to be set
+ enum_value: 'case_1',
+ secret_value: "my-secret"
+ )
end
end
diff --git a/spec/requests/api/admin/sidekiq_spec.rb b/spec/requests/api/admin/sidekiq_spec.rb
index 0fb8199eec6..303b62f4436 100644
--- a/spec/requests/api/admin/sidekiq_spec.rb
+++ b/spec/requests/api/admin/sidekiq_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe API::Admin::Sidekiq do
+describe API::Admin::Sidekiq, :clean_gitlab_redis_queues do
let_it_be(:admin) { create(:admin) }
describe 'DELETE /admin/sidekiq/queues/:queue_name' do
@@ -21,20 +21,20 @@ describe API::Admin::Sidekiq do
Sidekiq::Queue.new('authorized_projects').clear
end
- def add_job(user)
+ def add_job(user, args)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
- 'args' => [user.id],
+ 'args' => args,
'meta.user' => user.username
)
end
context 'valid request' do
it 'returns info about the deleted jobs' do
- add_job(admin)
- add_job(admin)
- add_job(create(:user))
+ add_job(admin, [1])
+ add_job(admin, [2])
+ add_job(create(:user), [3])
delete api("/admin/sidekiq/queues/authorized_projects?user=#{admin.username}", admin)
diff --git a/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb b/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
index 9451587fac3..a5159da84f3 100644
--- a/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
+++ b/spec/requests/api/graphql/mutations/admin/sidekiq_queues/delete_jobs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe 'Deleting Sidekiq jobs' do
+describe 'Deleting Sidekiq jobs', :clean_gitlab_redis_queues do
include GraphqlHelpers
let_it_be(:admin) { create(:admin) }
@@ -31,19 +31,19 @@ describe 'Deleting Sidekiq jobs' do
Sidekiq::Queue.new('authorized_projects').clear
end
- def add_job(user)
+ def add_job(user, args)
Sidekiq::Client.push(
'class' => 'AuthorizedProjectsWorker',
'queue' => 'authorized_projects',
- 'args' => [user.id],
+ 'args' => args,
'meta.user' => user.username
)
end
it 'returns info about the deleted jobs' do
- add_job(admin)
- add_job(admin)
- add_job(create(:user))
+ add_job(admin, [1])
+ add_job(admin, [2])
+ add_job(create(:user), [3])
post_graphql_mutation(mutation, current_user: admin)
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 7745a78a806..3b819c795b2 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -985,6 +985,7 @@ describe Ci::CreatePipelineService do
expect(pipeline).to be_persisted
expect(build).to be_kind_of(Ci::Build)
expect(build.options).to eq(config[:release].except(:stage, :only).with_indifferent_access)
+ expect(build).to be_persisted
end
end
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 35b1b802f35..370162b45f0 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -12,8 +12,8 @@ module GraphqlHelpers
end
# Run a loader's named resolver
- def resolve(resolver_class, obj: nil, args: {}, ctx: {})
- resolver_class.new(object: obj, context: ctx).resolve(args)
+ def resolve(resolver_class, obj: nil, args: {}, ctx: {}, field: nil)
+ resolver_class.new(object: obj, context: ctx, field: field).resolve(args)
end
# Eagerly run a loader's named resolver
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 1d42f26ad3e..6a4dcfcdb1e 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -51,7 +51,7 @@ module LoginHelpers
def gitlab_enable_admin_mode_sign_in(user)
visit new_admin_session_path
- fill_in 'password', with: user.password
+ fill_in 'user_password', with: user.password
click_button 'Enter Admin Mode'
end
@@ -62,6 +62,12 @@ module LoginHelpers
click_link provider
end
+ def gitlab_enable_admin_mode_sign_in_via(provider, user, uid, saml_response = nil)
+ mock_auth_hash_with_saml_xml(provider, uid, user.email, saml_response)
+ visit new_admin_session_path
+ click_link provider
+ end
+
# Requires Javascript driver.
def gitlab_sign_out
find(".header-user-dropdown-toggle").click
@@ -71,6 +77,11 @@ module LoginHelpers
expect(page).to have_button('Sign in')
end
+ # Requires Javascript driver.
+ def gitlab_disable_admin_mode
+ click_on 'Leave Admin Mode'
+ end
+
private
# Private: Login as the specified user
diff --git a/spec/views/admin/sessions/new.html.haml_spec.rb b/spec/views/admin/sessions/new.html.haml_spec.rb
index b3208296c80..05601e5471e 100644
--- a/spec/views/admin/sessions/new.html.haml_spec.rb
+++ b/spec/views/admin/sessions/new.html.haml_spec.rb
@@ -15,7 +15,7 @@ describe 'admin/sessions/new.html.haml' do
render
expect(rendered).to have_css('#login-pane.active')
- expect(rendered).to have_selector('input[name="password"]')
+ expect(rendered).to have_selector('input[name="user[password]"]')
end
it 'warns authentication not possible if password not set' do
diff --git a/spec/views/admin/sessions/two_factor.html.haml_spec.rb b/spec/views/admin/sessions/two_factor.html.haml_spec.rb
new file mode 100644
index 00000000000..2c061c7707b
--- /dev/null
+++ b/spec/views/admin/sessions/two_factor.html.haml_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'admin/sessions/two_factor.html.haml' do
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ end
+
+ context 'user has no two factor auth' do
+ let(:user) { create(:admin) }
+
+ it 'shows tab' do
+ render
+
+ expect(rendered).to have_no_field('user[otp_attempt]')
+ expect(rendered).to have_no_field('user[device_response]')
+ end
+ end
+
+ context 'user has otp active' do
+ let(:user) { create(:admin, :two_factor) }
+
+ it 'shows enter otp form' do
+ render
+
+ expect(rendered).to have_css('#login-pane.active')
+ expect(rendered).to have_field('user[otp_attempt]')
+ end
+ end
+
+ context 'user has u2f active' do
+ let(:user) { create(:admin, :two_factor_via_u2f) }
+
+ it 'shows enter u2f form' do
+ render
+
+ expect(rendered).to have_css('#js-login-2fa-device.btn')
+ end
+ end
+end
diff --git a/spec/workers/authorized_projects_worker_spec.rb b/spec/workers/authorized_projects_worker_spec.rb
index 4c02278de64..8ce0d4edd4f 100644
--- a/spec/workers/authorized_projects_worker_spec.rb
+++ b/spec/workers/authorized_projects_worker_spec.rb
@@ -21,5 +21,22 @@ describe AuthorizedProjectsWorker do
job.perform(-1)
end
end
+
+ it_behaves_like "an idempotent worker" do
+ let(:job_args) { user.id }
+
+ it "does not change authorizations when run twice" do
+ group = create(:group)
+ create(:project, namespace: group)
+ group.add_developer(user)
+
+ # Delete the authorization created by the after save hook of the member
+ # created above.
+ user.project_authorizations.delete_all
+
+ expect { job.perform(user.id) }.to change { user.project_authorizations.reload.size }.by(1)
+ expect { job.perform(user.id) }.not_to change { user.project_authorizations.reload.size }
+ end
+ end
end
end