diff options
Diffstat (limited to 'spec/lib')
291 files changed, 11258 insertions, 6137 deletions
diff --git a/spec/lib/api/ci/helpers/runner_spec.rb b/spec/lib/api/ci/helpers/runner_spec.rb index cc871d66d40..37277e7dcbd 100644 --- a/spec/lib/api/ci/helpers/runner_spec.rb +++ b/spec/lib/api/ci/helpers/runner_spec.rb @@ -15,7 +15,7 @@ RSpec.describe API::Ci::Helpers::Runner do it 'handles sticking of a build when a build ID is specified' do allow(helper).to receive(:params).and_return(id: build.id) - expect(ApplicationRecord.sticking) + expect(Ci::Build.sticking) .to receive(:stick_or_unstick_request) .with({}, :build, build.id) @@ -25,7 +25,7 @@ RSpec.describe API::Ci::Helpers::Runner do it 'does not handle sticking if no build ID was specified' do allow(helper).to receive(:params).and_return({}) - expect(ApplicationRecord.sticking) + expect(Ci::Build.sticking) .not_to receive(:stick_or_unstick_request) helper.current_job @@ -44,7 +44,7 @@ RSpec.describe API::Ci::Helpers::Runner do it 'handles sticking of a runner if a token is specified' do allow(helper).to receive(:params).and_return(token: runner.token) - expect(ApplicationRecord.sticking) + expect(Ci::Runner.sticking) .to receive(:stick_or_unstick_request) .with({}, :runner, runner.token) @@ -54,7 +54,7 @@ RSpec.describe API::Ci::Helpers::Runner do it 'does not handle sticking if no token was specified' do allow(helper).to receive(:params).and_return({}) - expect(ApplicationRecord.sticking) + expect(Ci::Runner.sticking) .not_to receive(:stick_or_unstick_request) helper.current_runner diff --git a/spec/lib/api/entities/projects/topic_spec.rb b/spec/lib/api/entities/projects/topic_spec.rb new file mode 100644 index 00000000000..cdf142dbb7d --- /dev/null +++ b/spec/lib/api/entities/projects/topic_spec.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe API::Entities::Projects::Topic do + let(:topic) { create(:topic) } + + subject { described_class.new(topic).as_json } + + it 'exposes correct attributes' do + expect(subject).to include( + :id, + :name, + :description, 
+ :total_projects_count, + :avatar_url + ) + end +end diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb index 37e040a422b..2277bd78e86 100644 --- a/spec/lib/api/helpers_spec.rb +++ b/spec/lib/api/helpers_spec.rb @@ -351,12 +351,14 @@ RSpec.describe API::Helpers do let(:send_git_blob) do subject.send(:send_git_blob, repository, blob) + subject.header end before do allow(subject).to receive(:env).and_return({}) allow(subject).to receive(:content_type) allow(subject).to receive(:header).and_return({}) + allow(subject).to receive(:body).and_return('') allow(Gitlab::Workhorse).to receive(:send_git_blob) end diff --git a/spec/lib/atlassian/jira_connect/client_spec.rb b/spec/lib/atlassian/jira_connect/client_spec.rb index 5c8d4282118..9201d1c5dcb 100644 --- a/spec/lib/atlassian/jira_connect/client_spec.rb +++ b/spec/lib/atlassian/jira_connect/client_spec.rb @@ -18,7 +18,15 @@ RSpec.describe Atlassian::JiraConnect::Client do end end - describe '.generate_update_sequence_id' do + around do |example| + if example.metadata[:skip_freeze_time] + example.run + else + freeze_time { example.run } + end + end + + describe '.generate_update_sequence_id', :skip_freeze_time do it 'returns unix time in microseconds as integer', :aggregate_failures do travel_to(Time.utc(1970, 1, 1, 0, 0, 1)) do expect(described_class.generate_update_sequence_id).to eq(1000) diff --git a/spec/lib/banzai/filter/emoji_filter_spec.rb b/spec/lib/banzai/filter/emoji_filter_spec.rb index cb0b470eaa1..d621f63211b 100644 --- a/spec/lib/banzai/filter/emoji_filter_spec.rb +++ b/spec/lib/banzai/filter/emoji_filter_spec.rb @@ -28,9 +28,9 @@ RSpec.describe Banzai::Filter::EmojiFilter do it 'replaces name versions of trademark, copyright, and registered trademark' do doc = filter('<p>:tm: :copyright: :registered:</p>') - expect(doc.css('gl-emoji')[0].text).to eq '™' - expect(doc.css('gl-emoji')[1].text).to eq '©' - expect(doc.css('gl-emoji')[2].text).to eq '®' + 
expect(doc.css('gl-emoji')[0].text).to eq '™️' + expect(doc.css('gl-emoji')[1].text).to eq '©️' + expect(doc.css('gl-emoji')[2].text).to eq '®️' end it 'correctly encodes the URL' do diff --git a/spec/lib/banzai/filter/footnote_filter_spec.rb b/spec/lib/banzai/filter/footnote_filter_spec.rb index 01b7319fab1..54faa748d53 100644 --- a/spec/lib/banzai/filter/footnote_filter_spec.rb +++ b/spec/lib/banzai/filter/footnote_filter_spec.rb @@ -5,34 +5,42 @@ require 'spec_helper' RSpec.describe Banzai::Filter::FootnoteFilter do include FilterSpecHelper - # first[^1] and second[^second] + # rubocop:disable Style/AsciiComments + # first[^1] and second[^second] and third[^_😄_] # [^1]: one # [^second]: two + # [^_😄_]: three + # rubocop:enable Style/AsciiComments let(:footnote) do - <<~EOF - <p>first<sup><a href="#fn1" id="fnref1">1</a></sup> and second<sup><a href="#fn2" id="fnref2">2</a></sup></p> - <p>same reference<sup><a href="#fn1" id="fnref1">1</a></sup></p> + <<~EOF.strip_heredoc + <p>first<sup><a href="#fn-1" id="fnref-1">1</a></sup> and second<sup><a href="#fn-second" id="fnref-second">2</a></sup> and third<sup><a href="#fn-_%F0%9F%98%84_" id="fnref-_%F0%9F%98%84_">3</a></sup></p> + <ol> - <li id="fn1"> - <p>one <a href="#fnref1">↩</a></p> + <li id="fn-1"> + <p>one <a href="#fnref-1" aria-label="Back to content">↩</a></p> </li> - <li id="fn2"> - <p>two <a href="#fnref2">↩</a></p> + <li id="fn-second"> + <p>two <a href="#fnref-second" aria-label="Back to content">↩</a></p> + </li>\n<li id="fn-_%F0%9F%98%84_"> + <p>three <a href="#fnref-_%F0%9F%98%84_" aria-label="Back to content">↩</a></p> </li> </ol> EOF end let(:filtered_footnote) do - <<~EOF - <p>first<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup> and second<sup class="footnote-ref"><a href="#fn2-#{identifier}" id="fnref2-#{identifier}">2</a></sup></p> - <p>same reference<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup></p> - 
<section class="footnotes"><ol> - <li id="fn1-#{identifier}"> - <p>one <a href="#fnref1-#{identifier}" class="footnote-backref">↩</a></p> + <<~EOF.strip_heredoc + <p>first<sup class="footnote-ref"><a href="#fn-1-#{identifier}" id="fnref-1-#{identifier}" data-footnote-ref="">1</a></sup> and second<sup class="footnote-ref"><a href="#fn-second-#{identifier}" id="fnref-second-#{identifier}" data-footnote-ref="">2</a></sup> and third<sup class="footnote-ref"><a href="#fn-_%F0%9F%98%84_-#{identifier}" id="fnref-_%F0%9F%98%84_-#{identifier}" data-footnote-ref="">3</a></sup></p> + + <section class=\"footnotes\" data-footnotes><ol> + <li id="fn-1-#{identifier}"> + <p>one <a href="#fnref-1-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref="">↩</a></p> + </li> + <li id="fn-second-#{identifier}"> + <p>two <a href="#fnref-second-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref="">↩</a></p> </li> - <li id="fn2-#{identifier}"> - <p>two <a href="#fnref2-#{identifier}" class="footnote-backref">↩</a></p> + <li id="fn-_%F0%9F%98%84_-#{identifier}"> + <p>three <a href="#fnref-_%F0%9F%98%84_-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref="">↩</a></p> </li> </ol></section> EOF @@ -41,10 +49,56 @@ RSpec.describe Banzai::Filter::FootnoteFilter do context 'when footnotes exist' do let(:doc) { filter(footnote) } let(:link_node) { doc.css('sup > a').first } - let(:identifier) { link_node[:id].delete_prefix('fnref1-') } + let(:identifier) { link_node[:id].delete_prefix('fnref-1-') } it 'properly adds the necessary ids and classes' do expect(doc.to_html).to eq filtered_footnote end + + context 'using ruby-based HTML renderer' do + # first[^1] and second[^second] + # [^1]: one + # [^second]: two + let(:footnote) do + <<~EOF + <p>first<sup><a href="#fn1" id="fnref1">1</a></sup> and second<sup><a href="#fn2" id="fnref2">2</a></sup></p> + <p>same reference<sup><a 
href="#fn1" id="fnref1">1</a></sup></p> + <ol> + <li id="fn1"> + <p>one <a href="#fnref1">↩</a></p> + </li> + <li id="fn2"> + <p>two <a href="#fnref2">↩</a></p> + </li> + </ol> + EOF + end + + let(:filtered_footnote) do + <<~EOF + <p>first<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup> and second<sup class="footnote-ref"><a href="#fn2-#{identifier}" id="fnref2-#{identifier}">2</a></sup></p> + <p>same reference<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup></p> + <section class="footnotes"><ol> + <li id="fn1-#{identifier}"> + <p>one <a href="#fnref1-#{identifier}" class="footnote-backref">↩</a></p> + </li> + <li id="fn2-#{identifier}"> + <p>two <a href="#fnref2-#{identifier}" class="footnote-backref">↩</a></p> + </li> + </ol></section> + EOF + end + + let(:doc) { filter(footnote) } + let(:identifier) { link_node[:id].delete_prefix('fnref1-') } + + before do + stub_feature_flags(use_cmark_renderer: false) + end + + it 'properly adds the necessary ids and classes' do + expect(doc.to_html).to eq filtered_footnote + end + end end end diff --git a/spec/lib/banzai/filter/markdown_filter_spec.rb b/spec/lib/banzai/filter/markdown_filter_spec.rb index c5e84a0c1e7..a310de5c015 100644 --- a/spec/lib/banzai/filter/markdown_filter_spec.rb +++ b/spec/lib/banzai/filter/markdown_filter_spec.rb @@ -5,90 +5,125 @@ require 'spec_helper' RSpec.describe Banzai::Filter::MarkdownFilter do include FilterSpecHelper - describe 'markdown engine from context' do - it 'defaults to CommonMark' do - expect_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance| - expect(instance).to receive(:render).and_return('test') + shared_examples_for 'renders correct markdown' do + describe 'markdown engine from context' do + it 'defaults to CommonMark' do + expect_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance| + expect(instance).to receive(:render).and_return('test') + 
end + + filter('test') end - filter('test') - end + it 'uses CommonMark' do + expect_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance| + expect(instance).to receive(:render).and_return('test') + end - it 'uses CommonMark' do - expect_next_instance_of(Banzai::Filter::MarkdownEngines::CommonMark) do |instance| - expect(instance).to receive(:render).and_return('test') + filter('test', { markdown_engine: :common_mark }) end - - filter('test', { markdown_engine: :common_mark }) end - end - describe 'code block' do - context 'using CommonMark' do - before do - stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark) + describe 'code block' do + context 'using CommonMark' do + before do + stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark) + end + + it 'adds language to lang attribute when specified' do + result = filter("```html\nsome code\n```", no_sourcepos: true) + + if Feature.enabled?(:use_cmark_renderer) + expect(result).to start_with('<pre lang="html"><code>') + else + expect(result).to start_with('<pre><code lang="html">') + end + end + + it 'does not add language to lang attribute when not specified' do + result = filter("```\nsome code\n```", no_sourcepos: true) + + expect(result).to start_with('<pre><code>') + end + + it 'works with utf8 chars in language' do + result = filter("```日\nsome code\n```", no_sourcepos: true) + + if Feature.enabled?(:use_cmark_renderer) + expect(result).to start_with('<pre lang="日"><code>') + else + expect(result).to start_with('<pre><code lang="日">') + end + end + + it 'works with additional language parameters' do + result = filter("```ruby:red gem foo\nsome code\n```", no_sourcepos: true) + + if Feature.enabled?(:use_cmark_renderer) + expect(result).to start_with('<pre lang="ruby:red" data-meta="gem foo"><code>') + else + expect(result).to start_with('<pre><code lang="ruby:red gem foo">') + end + end end + end - it 'adds language to lang attribute when specified' 
do - result = filter("```html\nsome code\n```", no_sourcepos: true) - - expect(result).to start_with('<pre><code lang="html">') - end - - it 'does not add language to lang attribute when not specified' do - result = filter("```\nsome code\n```", no_sourcepos: true) - - expect(result).to start_with('<pre><code>') - end + describe 'source line position' do + context 'using CommonMark' do + before do + stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark) + end - it 'works with utf8 chars in language' do - result = filter("```日\nsome code\n```", no_sourcepos: true) + it 'defaults to add data-sourcepos' do + result = filter('test') - expect(result).to start_with('<pre><code lang="日">') - end + expect(result).to eq '<p data-sourcepos="1:1-1:4">test</p>' + end - it 'works with additional language parameters' do - result = filter("```ruby:red gem\nsome code\n```", no_sourcepos: true) + it 'disables data-sourcepos' do + result = filter('test', no_sourcepos: true) - expect(result).to start_with('<pre><code lang="ruby:red gem">') + expect(result).to eq '<p>test</p>' + end end end - end - describe 'source line position' do - context 'using CommonMark' do - before do - stub_const('Banzai::Filter::MarkdownFilter::DEFAULT_ENGINE', :common_mark) - end + describe 'footnotes in tables' do + it 'processes footnotes in table cells' do + text = <<-MD.strip_heredoc + | Column1 | + | --------- | + | foot [^1] | - it 'defaults to add data-sourcepos' do - result = filter('test') + [^1]: a footnote + MD - expect(result).to eq '<p data-sourcepos="1:1-1:4">test</p>' - end + result = filter(text, no_sourcepos: true) - it 'disables data-sourcepos' do - result = filter('test', no_sourcepos: true) + expect(result).to include('<td>foot <sup') - expect(result).to eq '<p>test</p>' + if Feature.enabled?(:use_cmark_renderer) + expect(result).to include('<section class="footnotes" data-footnotes>') + else + expect(result).to include('<section class="footnotes">') + end end end end 
- describe 'footnotes in tables' do - it 'processes footnotes in table cells' do - text = <<-MD.strip_heredoc - | Column1 | - | --------- | - | foot [^1] | - - [^1]: a footnote - MD + context 'using ruby-based HTML renderer' do + before do + stub_feature_flags(use_cmark_renderer: false) + end - result = filter(text, no_sourcepos: true) + it_behaves_like 'renders correct markdown' + end - expect(result).to include('<td>foot <sup') - expect(result).to include('<section class="footnotes">') + context 'using c-based HTML renderer' do + before do + stub_feature_flags(use_cmark_renderer: true) end + + it_behaves_like 'renders correct markdown' end end diff --git a/spec/lib/banzai/filter/plantuml_filter_spec.rb b/spec/lib/banzai/filter/plantuml_filter_spec.rb index 5ad94c74514..d1a3b5689a8 100644 --- a/spec/lib/banzai/filter/plantuml_filter_spec.rb +++ b/spec/lib/banzai/filter/plantuml_filter_spec.rb @@ -5,30 +5,67 @@ require 'spec_helper' RSpec.describe Banzai::Filter::PlantumlFilter do include FilterSpecHelper - it 'replaces plantuml pre tag with img tag' do - stub_application_setting(plantuml_enabled: true, plantuml_url: "http://localhost:8080") - input = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' - output = '<div class="imageblock"><div class="content"><img class="plantuml" src="http://localhost:8080/png/U9npoazIqBLJ24uiIbImKl18pSd91m0rkGMq"></div></div>' - doc = filter(input) + shared_examples_for 'renders correct markdown' do + it 'replaces plantuml pre tag with img tag' do + stub_application_setting(plantuml_enabled: true, plantuml_url: "http://localhost:8080") - expect(doc.to_s).to eq output + input = if Feature.enabled?(:use_cmark_renderer) + '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' + else + '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' + end + + output = '<div class="imageblock"><div class="content"><img class="plantuml" src="http://localhost:8080/png/U9npoazIqBLJ24uiIbImKl18pSd91m0rkGMq"></div></div>' + 
doc = filter(input) + + expect(doc.to_s).to eq output + end + + it 'does not replace plantuml pre tag with img tag if disabled' do + stub_application_setting(plantuml_enabled: false) + + if Feature.enabled?(:use_cmark_renderer) + input = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' + output = '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' + else + input = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' + output = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' + end + + doc = filter(input) + + expect(doc.to_s).to eq output + end + + it 'does not replace plantuml pre tag with img tag if url is invalid' do + stub_application_setting(plantuml_enabled: true, plantuml_url: "invalid") + + input = if Feature.enabled?(:use_cmark_renderer) + '<pre lang="plantuml"><code>Bob -> Sara : Hello</code></pre>' + else + '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' + end + + output = '<div class="listingblock"><div class="content"><pre class="plantuml plantuml-error"> Error: cannot connect to PlantUML server at "invalid"</pre></div></div>' + doc = filter(input) + + expect(doc.to_s).to eq output + end end - it 'does not replace plantuml pre tag with img tag if disabled' do - stub_application_setting(plantuml_enabled: false) - input = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' - output = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' - doc = filter(input) + context 'using ruby-based HTML renderer' do + before do + stub_feature_flags(use_cmark_renderer: false) + end - expect(doc.to_s).to eq output + it_behaves_like 'renders correct markdown' end - it 'does not replace plantuml pre tag with img tag if url is invalid' do - stub_application_setting(plantuml_enabled: true, plantuml_url: "invalid") - input = '<pre><code lang="plantuml">Bob -> Sara : Hello</code></pre>' - output = '<div class="listingblock"><div class="content"><pre class="plantuml plantuml-error"> Error: cannot 
connect to PlantUML server at "invalid"</pre></div></div>' - doc = filter(input) + context 'using c-based HTML renderer' do + before do + stub_feature_flags(use_cmark_renderer: true) + end - expect(doc.to_s).to eq output + it_behaves_like 'renders correct markdown' end end diff --git a/spec/lib/banzai/filter/sanitization_filter_spec.rb b/spec/lib/banzai/filter/sanitization_filter_spec.rb index f880fe06ce3..8eb8e5cf800 100644 --- a/spec/lib/banzai/filter/sanitization_filter_spec.rb +++ b/spec/lib/banzai/filter/sanitization_filter_spec.rb @@ -45,10 +45,10 @@ RSpec.describe Banzai::Filter::SanitizationFilter do it 'allows `text-align` property in `style` attribute on table elements' do html = <<~HTML - <table> - <tr><th style="text-align: center">Head</th></tr> - <tr><td style="text-align: right">Body</th></tr> - </table> + <table> + <tr><th style="text-align: center">Head</th></tr> + <tr><td style="text-align: right">Body</th></tr> + </table> HTML doc = filter(html) @@ -140,14 +140,14 @@ RSpec.describe Banzai::Filter::SanitizationFilter do describe 'footnotes' do it 'allows correct footnote id property on links' do - exp = %q(<a href="#fn1" id="fnref1">foo/bar.md</a>) + exp = %q(<a href="#fn-first" id="fnref-first">foo/bar.md</a>) act = filter(exp) expect(act.to_html).to eq exp end it 'allows correct footnote id property on li element' do - exp = %q(<ol><li id="fn1">footnote</li></ol>) + exp = %q(<ol><li id="fn-last">footnote</li></ol>) act = filter(exp) expect(act.to_html).to eq exp @@ -156,7 +156,7 @@ RSpec.describe Banzai::Filter::SanitizationFilter do it 'removes invalid id for footnote links' do exp = %q(<a href="#fn1">link</a>) - %w[fnrefx test xfnref1].each do |id| + %w[fnrefx test xfnref-1].each do |id| act = filter(%(<a href="#fn1" id="#{id}">link</a>)) expect(act.to_html).to eq exp @@ -166,18 +166,58 @@ RSpec.describe Banzai::Filter::SanitizationFilter do it 'removes invalid id for footnote li' do exp = %q(<ol><li>footnote</li></ol>) - %w[fnx test 
xfn1].each do |id| + %w[fnx test xfn-1].each do |id| act = filter(%(<ol><li id="#{id}">footnote</li></ol>)) expect(act.to_html).to eq exp end end - it 'allows footnotes numbered higher than 9' do - exp = %q(<a href="#fn15" id="fnref15">link</a><ol><li id="fn15">footnote</li></ol>) - act = filter(exp) + context 'using ruby-based HTML renderer' do + before do + stub_feature_flags(use_cmark_renderer: false) + end - expect(act.to_html).to eq exp + it 'allows correct footnote id property on links' do + exp = %q(<a href="#fn1" id="fnref1">foo/bar.md</a>) + act = filter(exp) + + expect(act.to_html).to eq exp + end + + it 'allows correct footnote id property on li element' do + exp = %q(<ol><li id="fn1">footnote</li></ol>) + act = filter(exp) + + expect(act.to_html).to eq exp + end + + it 'removes invalid id for footnote links' do + exp = %q(<a href="#fn1">link</a>) + + %w[fnrefx test xfnref1].each do |id| + act = filter(%(<a href="#fn1" id="#{id}">link</a>)) + + expect(act.to_html).to eq exp + end + end + + it 'removes invalid id for footnote li' do + exp = %q(<ol><li>footnote</li></ol>) + + %w[fnx test xfn1].each do |id| + act = filter(%(<ol><li id="#{id}">footnote</li></ol>)) + + expect(act.to_html).to eq exp + end + end + + it 'allows footnotes numbered higher than 9' do + exp = %q(<a href="#fn15" id="fnref15">link</a><ol><li id="fn15">footnote</li></ol>) + act = filter(exp) + + expect(act.to_html).to eq exp + end end end end diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb index 7e45ecdd135..dfe022b51d2 100644 --- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb +++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb @@ -11,130 +11,210 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do # after Markdown rendering. 
result = filter(%{<pre lang="#{lang}"><code><script>alert(1)</script></code></pre>}) - expect(result.to_html).not_to include("<script>alert(1)</script>") - expect(result.to_html).to include("alert(1)") + # `(1)` symbols are wrapped by lexer tags. + expect(result.to_html).not_to match(%r{<script>alert.*<\/script>}) + + # `<>` stands for lexer tags like <span ...>, not <s above. + expect(result.to_html).to match(%r{alert(<.*>)?\((<.*>)?1(<.*>)?\)}) end end - context "when no language is specified" do - it "highlights as plaintext" do - result = filter('<pre><code>def fun end</code></pre>') + shared_examples_for 'renders correct markdown' do + context "when no language is specified" do + it "highlights as plaintext" do + result = filter('<pre><code>def fun end</code></pre>') - expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre>') - end + expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre>') + end - include_examples "XSS prevention", "" - end + include_examples "XSS prevention", "" + end - context "when contains mermaid diagrams" do - it "ignores mermaid blocks" do - result = filter('<pre data-mermaid-style="display"><code>mermaid code</code></pre>') + context "when contains mermaid diagrams" do + it "ignores mermaid blocks" do + result = filter('<pre data-mermaid-style="display"><code>mermaid code</code></pre>') - expect(result.to_html).to eq('<pre data-mermaid-style="display"><code>mermaid code</code></pre>') + expect(result.to_html).to eq('<pre data-mermaid-style="display"><code>mermaid code</code></pre>') + end end - end - context "when a valid language is specified" do - it "highlights as that language" do - result = filter('<pre><code lang="ruby">def 
fun end</code></pre>') + context "when a valid language is specified" do + it "highlights as that language" do + result = if Feature.enabled?(:use_cmark_renderer) + filter('<pre lang="ruby"><code>def fun end</code></pre>') + else + filter('<pre><code lang="ruby">def fun end</code></pre>') + end + + expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre>') + end - expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre>') + include_examples "XSS prevention", "ruby" end - include_examples "XSS prevention", "ruby" - end + context "when an invalid language is specified" do + it "highlights as plaintext" do + result = if Feature.enabled?(:use_cmark_renderer) + filter('<pre lang="gnuplot"><code>This is a test</code></pre>') + else + filter('<pre><code lang="gnuplot">This is a test</code></pre>') + end - context "when an invalid language is specified" do - it "highlights as plaintext" do - result = filter('<pre><code lang="gnuplot">This is a test</code></pre>') + expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>') + end - expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>') + include_examples "XSS prevention", "gnuplot" end - include_examples "XSS prevention", "gnuplot" - end + context "languages that should be passed through" do + 
let(:delimiter) { described_class::LANG_PARAMS_DELIMITER } + let(:data_attr) { described_class::LANG_PARAMS_ATTR } - context "languages that should be passed through" do - let(:delimiter) { described_class::PARAMS_DELIMITER } - let(:data_attr) { described_class::LANG_PARAMS_ATTR } + %w(math mermaid plantuml suggestion).each do |lang| + context "when #{lang} is specified" do + it "highlights as plaintext but with the correct language attribute and class" do + result = if Feature.enabled?(:use_cmark_renderer) + filter(%{<pre lang="#{lang}"><code>This is a test</code></pre>}) + else + filter(%{<pre><code lang="#{lang}">This is a test</code></pre>}) + end - %w(math mermaid plantuml suggestion).each do |lang| - context "when #{lang} is specified" do - it "highlights as plaintext but with the correct language attribute and class" do - result = filter(%{<pre><code lang="#{lang}">This is a test</code></pre>}) + expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>}) + end - expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>}) + include_examples "XSS prevention", lang end - include_examples "XSS prevention", lang + context "when #{lang} has extra params" do + let(:lang_params) { 'foo-bar-kux' } + + let(:xss_lang) do + if Feature.enabled?(:use_cmark_renderer) + "#{lang} data-meta=\"foo-bar-kux\"<script>alert(1)</script>" + else + "#{lang}#{described_class::LANG_PARAMS_DELIMITER}<script>alert(1)</script>" + end + end + + it "includes data-lang-params tag with extra information" do + result = if Feature.enabled?(:use_cmark_renderer) + filter(%{<pre lang="#{lang}" data-meta="#{lang_params}"><code>This is a test</code></pre>}) + else + filter(%{<pre><code 
lang="#{lang}#{delimiter}#{lang_params}">This is a test</code></pre>}) + end + + expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>}) + end + + include_examples "XSS prevention", lang + + if Feature.enabled?(:use_cmark_renderer) + include_examples "XSS prevention", + "#{lang} data-meta=\"foo-bar-kux\"<script>alert(1)</script>" + else + include_examples "XSS prevention", + "#{lang}#{described_class::LANG_PARAMS_DELIMITER}<script>alert(1)</script>" + end + + include_examples "XSS prevention", + "#{lang} data-meta=\"foo-bar-kux\"<script>alert(1)</script>" + end end - context "when #{lang} has extra params" do - let(:lang_params) { 'foo-bar-kux' } + context 'when multiple param delimiters are used' do + let(:lang) { 'suggestion' } + let(:lang_params) { '-1+10' } - it "includes data-lang-params tag with extra information" do - result = filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}">This is a test</code></pre>}) + let(:expected_result) do + %{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params} more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>} + end - expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>}) + context 'when delimiter is space' do + it 'delimits on the first appearance' do + if Feature.enabled?(:use_cmark_renderer) + result = filter(%{<pre lang="#{lang}" data-meta="#{lang_params} more-things"><code>This is a test</code></pre>}) + + expect(result.to_html).to eq(expected_result) + else + result = filter(%{<pre><code 
lang="#{lang}#{delimiter}#{lang_params}#{delimiter}more-things">This is a test</code></pre>}) + + expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}#{delimiter}more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>}) + end + end end - include_examples "XSS prevention", lang - include_examples "XSS prevention", - "#{lang}#{described_class::PARAMS_DELIMITER}<script>alert(1)</script>" - include_examples "XSS prevention", - "#{lang}#{described_class::PARAMS_DELIMITER}<script>alert(1)</script>" + context 'when delimiter is colon' do + it 'delimits on the first appearance' do + result = filter(%{<pre lang="#{lang}#{delimiter}#{lang_params} more-things"><code>This is a test</code></pre>}) + + if Feature.enabled?(:use_cmark_renderer) + expect(result.to_html).to eq(expected_result) + else + expect(result.to_html).to eq(%{<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">This is a test</span></code></pre>}) + end + end + end end end - context 'when multiple param delimiters are used' do - let(:lang) { 'suggestion' } - let(:lang_params) { '-1+10' } + context "when sourcepos metadata is available" do + it "includes it in the highlighted code block" do + result = filter('<pre data-sourcepos="1:1-3:3"><code lang="plaintext">This is a test</code></pre>') - it "delimits on the first appearance" do - result = filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}#{delimiter}more-things">This is a test</code></pre>}) - - expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}#{delimiter}more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>}) + expect(result.to_html).to eq('<pre 
data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>') end end - end - context "when sourcepos metadata is available" do - it "includes it in the highlighted code block" do - result = filter('<pre data-sourcepos="1:1-3:3"><code lang="plaintext">This is a test</code></pre>') + context "when Rouge lexing fails" do + before do + allow_next_instance_of(Rouge::Lexers::Ruby) do |instance| + allow(instance).to receive(:stream_tokens).and_raise(StandardError) + end + end - expect(result.to_html).to eq('<pre data-sourcepos="1:1-3:3" class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>') - end - end + it "highlights as plaintext" do + result = if Feature.enabled?(:use_cmark_renderer) + filter('<pre lang="ruby"><code>This is a test</code></pre>') + else + filter('<pre><code lang="ruby">This is a test</code></pre>') + end - context "when Rouge lexing fails" do - before do - allow_next_instance_of(Rouge::Lexers::Ruby) do |instance| - allow(instance).to receive(:stream_tokens).and_raise(StandardError) + expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a test</span></code></pre>') end + + include_examples "XSS prevention", "ruby" end - it "highlights as plaintext" do - result = filter('<pre><code lang="ruby">This is a test</code></pre>') + context "when Rouge lexing fails after a retry" do + before do + allow_next_instance_of(Rouge::Lexers::PlainText) do |instance| + allow(instance).to receive(:stream_tokens).and_raise(StandardError) + end + end - expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight" lang="" v-pre="true"><code><span id="LC1" class="line" lang="">This is a 
test</span></code></pre>') - end + it "does not add highlighting classes" do + result = filter('<pre><code>This is a test</code></pre>') + + expect(result.to_html).to eq('<pre><code>This is a test</code></pre>') + end - include_examples "XSS prevention", "ruby" + include_examples "XSS prevention", "ruby" + end end - context "when Rouge lexing fails after a retry" do + context 'using ruby-based HTML renderer' do before do - allow_next_instance_of(Rouge::Lexers::PlainText) do |instance| - allow(instance).to receive(:stream_tokens).and_raise(StandardError) - end + stub_feature_flags(use_cmark_renderer: false) end - it "does not add highlighting classes" do - result = filter('<pre><code>This is a test</code></pre>') + it_behaves_like 'renders correct markdown' + end - expect(result.to_html).to eq('<pre><code>This is a test</code></pre>') + context 'using c-based HTML renderer' do + before do + stub_feature_flags(use_cmark_renderer: true) end - include_examples "XSS prevention", "ruby" + it_behaves_like 'renders correct markdown' end end diff --git a/spec/lib/banzai/pipeline/emoji_pipeline_spec.rb b/spec/lib/banzai/pipeline/emoji_pipeline_spec.rb index 6de9d65f1b2..8103846d4f7 100644 --- a/spec/lib/banzai/pipeline/emoji_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/emoji_pipeline_spec.rb @@ -3,18 +3,20 @@ require 'spec_helper' RSpec.describe Banzai::Pipeline::EmojiPipeline do + let(:emoji) { TanukiEmoji.find_by_alpha_code('100') } + def parse(text) described_class.to_html(text, {}) end it 'replaces emoji' do - expected_result = "Hello world #{Gitlab::Emoji.gl_emoji_tag('100')}" + expected_result = "Hello world #{Gitlab::Emoji.gl_emoji_tag(emoji)}" expect(parse('Hello world :100:')).to eq(expected_result) end it 'filters out HTML tags' do - expected_result = "Hello <b>world</b> #{Gitlab::Emoji.gl_emoji_tag('100')}" + expected_result = "Hello <b>world</b> #{Gitlab::Emoji.gl_emoji_tag(emoji)}" expect(parse('Hello <b>world</b> :100:')).to eq(expected_result) end diff 
--git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb index 7a335fad3f8..01bca7b23e8 100644 --- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb @@ -31,29 +31,29 @@ RSpec.describe Banzai::Pipeline::FullPipeline do describe 'footnotes' do let(:project) { create(:project, :public) } let(:html) { described_class.to_html(footnote_markdown, project: project) } - let(:identifier) { html[/.*fnref1-(\d+).*/, 1] } + let(:identifier) { html[/.*fnref-1-(\d+).*/, 1] } let(:footnote_markdown) do <<~EOF - first[^1] and second[^second] and twenty[^twenty] + first[^1] and second[^😄second] and twenty[^_twenty] [^1]: one - [^second]: two - [^twenty]: twenty + [^😄second]: two + [^_twenty]: twenty EOF end let(:filtered_footnote) do - <<~EOF - <p dir="auto">first<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup> and second<sup class="footnote-ref"><a href="#fn2-#{identifier}" id="fnref2-#{identifier}">2</a></sup> and twenty<sup class="footnote-ref"><a href="#fn3-#{identifier}" id="fnref3-#{identifier}">3</a></sup></p> + <<~EOF.strip_heredoc + <p dir="auto">first<sup class="footnote-ref"><a href="#fn-1-#{identifier}" id="fnref-1-#{identifier}" data-footnote-ref="">1</a></sup> and second<sup class="footnote-ref"><a href="#fn-%F0%9F%98%84second-#{identifier}" id="fnref-%F0%9F%98%84second-#{identifier}" data-footnote-ref="">2</a></sup> and twenty<sup class="footnote-ref"><a href="#fn-_twenty-#{identifier}" id="fnref-_twenty-#{identifier}" data-footnote-ref="">3</a></sup></p> - <section class="footnotes"><ol> - <li id="fn1-#{identifier}"> - <p>one <a href="#fnref1-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> + <section class="footnotes" data-footnotes><ol> + <li id="fn-1-#{identifier}"> + <p>one <a 
href="#fnref-1-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> </li> - <li id="fn2-#{identifier}"> - <p>two <a href="#fnref2-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> + <li id="fn-%F0%9F%98%84second-#{identifier}"> + <p>two <a href="#fnref-%F0%9F%98%84second-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> </li> - <li id="fn3-#{identifier}"> - <p>twenty <a href="#fnref3-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> + <li id="fn-_twenty-#{identifier}"> + <p>twenty <a href="#fnref-_twenty-#{identifier}" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> </li> </ol></section> EOF @@ -64,6 +64,47 @@ RSpec.describe Banzai::Pipeline::FullPipeline do expect(html.lines.map(&:strip).join("\n")).to eq filtered_footnote end + + context 'using ruby-based HTML renderer' do + let(:html) { described_class.to_html(footnote_markdown, project: project) } + let(:identifier) { html[/.*fnref1-(\d+).*/, 1] } + let(:footnote_markdown) do + <<~EOF + first[^1] and second[^second] and twenty[^twenty] + [^1]: one + [^second]: two + [^twenty]: twenty + EOF + end + + let(:filtered_footnote) do + <<~EOF + <p dir="auto">first<sup class="footnote-ref"><a href="#fn1-#{identifier}" id="fnref1-#{identifier}">1</a></sup> and second<sup 
class="footnote-ref"><a href="#fn2-#{identifier}" id="fnref2-#{identifier}">2</a></sup> and twenty<sup class="footnote-ref"><a href="#fn3-#{identifier}" id="fnref3-#{identifier}">3</a></sup></p> + + <section class="footnotes"><ol> + <li id="fn1-#{identifier}"> + <p>one <a href="#fnref1-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> + </li> + <li id="fn2-#{identifier}"> + <p>two <a href="#fnref2-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> + </li> + <li id="fn3-#{identifier}"> + <p>twenty <a href="#fnref3-#{identifier}" class="footnote-backref"><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p> + </li> + </ol></section> + EOF + end + + before do + stub_feature_flags(use_cmark_renderer: false) + end + + it 'properly adds the necessary ids and classes' do + stub_commonmark_sourcepos_disabled + + expect(html.lines.map(&:strip).join("\n")).to eq filtered_footnote + end + end end describe 'links are detected as malicious' do diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb index 4903f624469..394fcc06eba 100644 --- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb +++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb @@ -5,18 +5,7 @@ require 'spec_helper' RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do using RSpec::Parameterized::TableSyntax - describe 'backslash escapes' do - let_it_be(:project) { create(:project, :public) } - let_it_be(:issue) { create(:issue, project: project) } - - def correct_html_included(markdown, expected) - result = described_class.call(markdown, {}) - - expect(result[:output].to_html).to include(expected) - - result - 
end - + shared_examples_for 'renders correct markdown' do describe 'CommonMark tests', :aggregate_failures do it 'converts all reference punctuation to literals' do reference_chars = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS @@ -32,7 +21,7 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do expect(result[:escaped_literals]).to be_truthy end - it 'ensure we handle all the GitLab reference characters' do + it 'ensure we handle all the GitLab reference characters', :eager_load do reference_chars = ObjectSpace.each_object(Class).map do |klass| next unless klass.included_modules.include?(Referable) next unless klass.respond_to?(:reference_prefix) @@ -79,10 +68,19 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do end describe 'work in all other contexts, including URLs and link titles, link references, and info strings in fenced code blocks' do + let(:markdown) { %Q(``` foo\\@bar\nfoo\n```) } + + it 'renders correct html' do + if Feature.enabled?(:use_cmark_renderer) + correct_html_included(markdown, %Q(<pre data-sourcepos="1:1-3:3" lang="foo@bar"><code>foo\n</code></pre>)) + else + correct_html_included(markdown, %Q(<code lang="foo@bar">foo\n</code>)) + end + end + where(:markdown, :expected) do %q![foo](/bar\@ "\@title")! | %q(<a href="/bar@" title="@title">foo</a>) %Q![foo]\n\n[foo]: /bar\\@ "\\@title"! 
| %q(<a href="/bar@" title="@title">foo</a>) - %Q(``` foo\\@bar\nfoo\n```) | %Q(<code lang="foo@bar">foo\n</code>) end with_them do @@ -91,4 +89,33 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do end end end + + describe 'backslash escapes' do + let_it_be(:project) { create(:project, :public) } + let_it_be(:issue) { create(:issue, project: project) } + + def correct_html_included(markdown, expected) + result = described_class.call(markdown, {}) + + expect(result[:output].to_html).to include(expected) + + result + end + + context 'using ruby-based HTML renderer' do + before do + stub_feature_flags(use_cmark_renderer: false) + end + + it_behaves_like 'renders correct markdown' + end + + context 'using c-based HTML renderer' do + before do + stub_feature_flags(use_cmark_renderer: true) + end + + it_behaves_like 'renders correct markdown' + end + end end diff --git a/spec/lib/banzai/renderer_spec.rb b/spec/lib/banzai/renderer_spec.rb index 52bf3087875..d487268da78 100644 --- a/spec/lib/banzai/renderer_spec.rb +++ b/spec/lib/banzai/renderer_spec.rb @@ -84,6 +84,24 @@ RSpec.describe Banzai::Renderer do end end + describe '#cacheless_render' do + context 'without cache' do + let(:object) { fake_object(fresh: false) } + let(:histogram) { double('prometheus histogram') } + + it 'returns cacheless render field' do + allow(renderer).to receive(:render_result).and_return(output: 'test') + allow(renderer).to receive(:real_duration_histogram).and_return(histogram) + allow(renderer).to receive(:cpu_duration_histogram).and_return(histogram) + + expect(renderer).to receive(:render_result).with('test', {}) + expect(histogram).to receive(:observe).twice + + renderer.cacheless_render('test') + end + end + end + describe '#post_process' do let(:context_options) { {} } let(:html) { 'Consequatur aperiam et nesciunt modi aut assumenda quo id. 
'} diff --git a/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb new file mode 100644 index 00000000000..9f71175f46f --- /dev/null +++ b/spec/lib/bulk_imports/common/pipelines/milestones_pipeline_spec.rb @@ -0,0 +1,154 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Common::Pipelines::MilestonesPipeline do + let(:user) { create(:user) } + let(:group) { create(:group) } + let(:bulk_import) { create(:bulk_import, user: user) } + let(:tracker) { create(:bulk_import_tracker, entity: entity) } + let(:context) { BulkImports::Pipeline::Context.new(tracker) } + let(:source_project_id) { nil } # if set, then exported_milestone is a project milestone + let(:source_group_id) { nil } # if set, then exported_milestone is a group milestone + let(:exported_milestone_for_project) do + exported_milestone_for_group.merge( + 'events' => [{ + 'project_id' => source_project_id, + 'author_id' => 9, + 'created_at' => "2021-08-12T19:12:49.810Z", + 'updated_at' => "2021-08-12T19:12:49.810Z", + 'target_type' => "Milestone", + 'group_id' => source_group_id, + 'fingerprint' => 'f270eb9b27d0', + 'id' => 66, + 'action' => "created" + }] + ) + end + + let(:exported_milestone_for_group) do + { + 'id' => 1, + 'title' => "v1.0", + 'project_id' => source_project_id, + 'description' => "Amet velit repellat ut rerum aut cum.", + 'due_date' => "2019-11-22", + 'created_at' => "2019-11-20T17:02:14.296Z", + 'updated_at' => "2019-11-20T17:02:14.296Z", + 'state' => "active", + 'iid' => 2, + 'start_date' => "2019-11-21", + 'group_id' => source_group_id + } + end + + before do + group.add_owner(user) + + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: exported_milestones)) + end + end + + subject { described_class.new(context) } + + 
shared_examples 'bulk_imports milestones pipeline' do + let(:tested_entity) { nil } + + describe '#run' do + it 'imports milestones into destination' do + expect { subject.run }.to change(Milestone, :count).by(1) + + imported_milestone = tested_entity.milestones.first + + expect(imported_milestone.title).to eq("v1.0") + expect(imported_milestone.description).to eq("Amet velit repellat ut rerum aut cum.") + expect(imported_milestone.due_date.to_s).to eq("2019-11-22") + expect(imported_milestone.created_at).to eq("2019-11-20T17:02:14.296Z") + expect(imported_milestone.updated_at).to eq("2019-11-20T17:02:14.296Z") + expect(imported_milestone.start_date.to_s).to eq("2019-11-21") + end + end + + describe '#load' do + context 'when milestone is not persisted' do + it 'saves the milestone' do + milestone = build(:milestone, group: group) + + expect(milestone).to receive(:save!) + + subject.load(context, milestone) + end + end + + context 'when milestone is persisted' do + it 'does not save milestone' do + milestone = create(:milestone, group: group) + + expect(milestone).not_to receive(:save!) 
+ + subject.load(context, milestone) + end + end + + context 'when milestone is missing' do + it 'returns' do + expect(subject.load(context, nil)).to be_nil + end + end + end + end + + context 'group milestone' do + let(:exported_milestones) { [[exported_milestone_for_group, 0]] } + let(:entity) do + create( + :bulk_import_entity, + group: group, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Group', + destination_namespace: group.full_path + ) + end + + it_behaves_like 'bulk_imports milestones pipeline' do + let(:tested_entity) { group } + let(:source_group_id) { 1 } + end + end + + context 'project milestone' do + let(:project) { create(:project, group: group) } + let(:exported_milestones) { [[exported_milestone_for_project, 0]] } + + let(:entity) do + create( + :bulk_import_entity, + :project_entity, + project: project, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Project', + destination_namespace: group.full_path + ) + end + + it_behaves_like 'bulk_imports milestones pipeline' do + let(:tested_entity) { project } + let(:source_project_id) { 1 } + + it 'imports events' do + subject.run + + imported_event = tested_entity.milestones.first.events.first + + expect(imported_event.created_at).to eq("2021-08-12T19:12:49.810Z") + expect(imported_event.updated_at).to eq("2021-08-12T19:12:49.810Z") + expect(imported_event.target_type).to eq("Milestone") + expect(imported_event.fingerprint).to eq("f270eb9b27d0") + expect(imported_event.action).to eq("created") + end + end + end +end diff --git a/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb new file mode 100644 index 00000000000..a3cc866a406 --- /dev/null +++ b/spec/lib/bulk_imports/common/pipelines/uploads_pipeline_spec.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 
BulkImports::Common::Pipelines::UploadsPipeline do + let_it_be(:tmpdir) { Dir.mktmpdir } + let_it_be(:project) { create(:project) } + let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, source_full_path: 'test') } + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + let_it_be(:uploads_dir_path) { File.join(tmpdir, '72a497a02fe3ee09edae2ed06d390038') } + let_it_be(:upload_file_path) { File.join(uploads_dir_path, 'upload.txt')} + + subject(:pipeline) { described_class.new(context) } + + before do + stub_uploads_object_storage(FileUploader) + + FileUtils.mkdir_p(uploads_dir_path) + FileUtils.touch(upload_file_path) + end + + after do + FileUtils.remove_entry(tmpdir) if Dir.exist?(tmpdir) + end + + describe '#run' do + it 'imports uploads into destination portable and removes tmpdir' do + allow(Dir).to receive(:mktmpdir).with('bulk_imports').and_return(tmpdir) + allow(pipeline).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [upload_file_path])) + + pipeline.run + + expect(project.uploads.map { |u| u.retrieve_uploader.filename }).to include('upload.txt') + + expect(Dir.exist?(tmpdir)).to eq(false) + end + end + + describe '#extract' do + it 'downloads & extracts upload paths' do + allow(Dir).to receive(:mktmpdir).and_return(tmpdir) + expect(pipeline).to receive(:untar_zxf) + file_download_service = instance_double("BulkImports::FileDownloadService") + + expect(BulkImports::FileDownloadService) + .to receive(:new) + .with( + configuration: context.configuration, + relative_url: "/projects/test/export_relations/download?relation=uploads", + dir: tmpdir, + filename: 'uploads.tar.gz') + .and_return(file_download_service) + + expect(file_download_service).to receive(:execute) + + extracted_data = pipeline.extract(context) + + expect(extracted_data.data).to contain_exactly(uploads_dir_path, upload_file_path) + end + end + + 
describe '#load' do + it 'creates a file upload' do + expect { pipeline.load(context, upload_file_path) }.to change { project.uploads.count }.by(1) + end + + context 'when dynamic path is nil' do + it 'returns' do + expect { pipeline.load(context, File.join(tmpdir, 'test')) }.not_to change { project.uploads.count } + end + end + + context 'when path is a directory' do + it 'returns' do + expect { pipeline.load(context, uploads_dir_path) }.not_to change { project.uploads.count } + end + end + end +end diff --git a/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb b/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb new file mode 100644 index 00000000000..0eefb7390dc --- /dev/null +++ b/spec/lib/bulk_imports/common/pipelines/wiki_pipeline_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Common::Pipelines::WikiPipeline do + describe '#run' do + let_it_be(:user) { create(:user) } + let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:parent) { create(:project) } + + let_it_be(:entity) do + create( + :bulk_import_entity, + :project_entity, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Wiki', + destination_namespace: parent.full_path, + project: parent + ) + end + + it_behaves_like 'wiki pipeline imports a wiki for an entity' + end +end diff --git a/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb deleted file mode 100644 index 7a0f964c5f3..00000000000 --- a/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb +++ /dev/null @@ -1,35 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe BulkImports::Groups::Graphql::GetMilestonesQuery do - it 'has a valid query' do - tracker = create(:bulk_import_tracker) - context = BulkImports::Pipeline::Context.new(tracker) - - query = 
GraphQL::Query.new( - GitlabSchema, - described_class.to_s, - variables: described_class.variables(context) - ) - result = GitlabSchema.static_validator.validate(query) - - expect(result[:errors]).to be_empty - end - - describe '#data_path' do - it 'returns data path' do - expected = %w[data group milestones nodes] - - expect(described_class.data_path).to eq(expected) - end - end - - describe '#page_info_path' do - it 'returns pagination information path' do - expected = %w[data group milestones page_info] - - expect(described_class.page_info_path).to eq(expected) - end - end -end diff --git a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb index de0b56045b3..69363bf0866 100644 --- a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb +++ b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb @@ -11,20 +11,66 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } let(:service_double) { instance_double(::Groups::CreateService) } - let(:data) { { foo: :bar } } + let(:data) { { 'path' => 'test' } } subject { described_class.new } + context 'when path is missing' do + it 'raises an error' do + expect { subject.load(context, {}) }.to raise_error(described_class::GroupCreationError, 'Path is missing') + end + end + + context 'when destination namespace is not a group' do + it 'raises an error' do + entity.update!(destination_namespace: user.namespace.path) + + expect { subject.load(context, data) }.to raise_error(described_class::GroupCreationError, 'Destination is not a group') + end + end + + context 'when group exists' do + it 'raises an error' do + group1 = create(:group) + group2 = create(:group, parent: group1) + entity.update!(destination_namespace: group1.full_path) + data = { 'path' => group2.path } + + expect { subject.load(context, data) }.to raise_error(described_class::GroupCreationError, 'Group exists') 
+ end + end + + context 'when there are other group errors' do + it 'raises an error with those errors' do + group = ::Group.new + group.validate + expected_errors = group.errors.full_messages.to_sentence + + expect(::Groups::CreateService) + .to receive(:new) + .with(context.current_user, data) + .and_return(service_double) + + expect(service_double).to receive(:execute).and_return(group) + expect(entity).not_to receive(:update!) + + expect { subject.load(context, data) }.to raise_error(described_class::GroupCreationError, expected_errors) + end + end + context 'when user can create group' do shared_examples 'calls Group Create Service to create a new group' do it 'calls Group Create Service to create a new group' do + group_double = instance_double(::Group) + expect(::Groups::CreateService) .to receive(:new) .with(context.current_user, data) .and_return(service_double) - expect(service_double).to receive(:execute) - expect(entity).to receive(:update!) + expect(service_double).to receive(:execute).and_return(group_double) + expect(group_double).to receive(:errors).and_return([]) + expect(entity).to receive(:update!).with(group: group_double) subject.load(context, data) end @@ -40,7 +86,7 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do context 'when there is parent group' do let(:parent) { create(:group) } - let(:data) { { 'parent_id' => parent.id } } + let(:data) { { 'parent_id' => parent.id, 'path' => 'test' } } before do allow(Ability).to receive(:allowed?).with(user, :create_subgroup, parent).and_return(true) @@ -55,7 +101,7 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do it 'does not create new group' do expect(::Groups::CreateService).not_to receive(:new) - subject.load(context, data) + expect { subject.load(context, data) }.to raise_error(described_class::GroupCreationError, 'User not allowed to create group') end end @@ -69,7 +115,7 @@ RSpec.describe BulkImports::Groups::Loaders::GroupLoader do context 'when there is parent 
group' do let(:parent) { create(:group) } - let(:data) { { 'parent_id' => parent.id } } + let(:data) { { 'parent_id' => parent.id, 'path' => 'test' } } before do allow(Ability).to receive(:allowed?).with(user, :create_subgroup, parent).and_return(false) diff --git a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb deleted file mode 100644 index a8354e62459..00000000000 --- a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb +++ /dev/null @@ -1,73 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do - let_it_be(:user) { create(:user) } - let_it_be(:group) { create(:group) } - let_it_be(:bulk_import) { create(:bulk_import, user: user) } - let_it_be(:filepath) { 'spec/fixtures/bulk_imports/gz/milestones.ndjson.gz' } - let_it_be(:entity) do - create( - :bulk_import_entity, - group: group, - bulk_import: bulk_import, - source_full_path: 'source/full/path', - destination_name: 'My Destination Group', - destination_namespace: group.full_path - ) - end - - let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } - let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } - - let(:tmpdir) { Dir.mktmpdir } - - before do - FileUtils.copy_file(filepath, File.join(tmpdir, 'milestones.ndjson.gz')) - group.add_owner(user) - end - - subject { described_class.new(context) } - - describe '#run' do - it 'imports group milestones into destination group and removes tmpdir' do - allow(Dir).to receive(:mktmpdir).and_return(tmpdir) - allow_next_instance_of(BulkImports::FileDownloadService) do |service| - allow(service).to receive(:execute) - end - - expect { subject.run }.to change(Milestone, :count).by(5) - expect(group.milestones.pluck(:title)).to contain_exactly('v4.0', 'v3.0', 'v2.0', 'v1.0', 'v0.0') - expect(File.directory?(tmpdir)).to eq(false) - end - end - - describe '#load' 
do - context 'when milestone is not persisted' do - it 'saves the milestone' do - milestone = build(:milestone, group: group) - - expect(milestone).to receive(:save!) - - subject.load(context, milestone) - end - end - - context 'when milestone is persisted' do - it 'does not save milestone' do - milestone = create(:milestone, group: group) - - expect(milestone).not_to receive(:save!) - - subject.load(context, milestone) - end - end - - context 'when milestone is missing' do - it 'returns' do - expect(subject.load(context, nil)).to be_nil - end - end - end -end diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb index b322b7b0edf..5719acac4d7 100644 --- a/spec/lib/bulk_imports/groups/stage_spec.rb +++ b/spec/lib/bulk_imports/groups/stage_spec.rb @@ -12,7 +12,7 @@ RSpec.describe BulkImports::Groups::Stage do [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline], [1, BulkImports::Groups::Pipelines::MembersPipeline], [1, BulkImports::Common::Pipelines::LabelsPipeline], - [1, BulkImports::Groups::Pipelines::MilestonesPipeline], + [1, BulkImports::Common::Pipelines::MilestonesPipeline], [1, BulkImports::Groups::Pipelines::BadgesPipeline], [2, BulkImports::Common::Pipelines::BoardsPipeline] ] diff --git a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb index 7d156c2c3df..c5197fb29d9 100644 --- a/spec/lib/bulk_imports/ndjson_pipeline_spec.rb +++ b/spec/lib/bulk_imports/ndjson_pipeline_spec.rb @@ -111,6 +111,7 @@ RSpec.describe BulkImports::NdjsonPipeline do context = double(portable: group, current_user: user, import_export_config: config, bulk_import: import_double, entity: entity_double) allow(subject).to receive(:import_export_config).and_return(config) allow(subject).to receive(:context).and_return(context) + relation_object = double expect(Gitlab::ImportExport::Group::RelationFactory) .to receive(:create) @@ -124,6 +125,8 @@ RSpec.describe BulkImports::NdjsonPipeline 
do user: user, excluded_keys: nil ) + .and_return(relation_object) + expect(relation_object).to receive(:assign_attributes).with(group: group) subject.transform(context, data) end diff --git a/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb new file mode 100644 index 00000000000..8f610fcc2ae --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/external_pull_requests_pipeline_spec.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline do + let_it_be(:project) { create(:project) } + let_it_be(:bulk_import) { create(:bulk_import) } + let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) } + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + let(:attributes) { {} } + let(:external_pr) { project.external_pull_requests.last } + let(:external_pull_request) do + { + 'pull_request_iid' => 4, + 'source_branch' => 'feature', + 'target_branch' => 'main', + 'source_repository' => 'repository', + 'target_repository' => 'repository', + 'source_sha' => 'abc', + 'target_sha' => 'xyz', + 'status' => 'open', + 'created_at' => '2019-12-24T14:04:50.053Z', + 'updated_at' => '2019-12-24T14:05:18.138Z' + }.merge(attributes) + end + + subject(:pipeline) { described_class.new(context) } + + describe '#run' do + before do + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:remove_tmp_dir) + allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[external_pull_request, 0]])) + end + + pipeline.run + end + + it 'imports external pull request', :aggregate_failures do + expect(external_pr.pull_request_iid).to 
eq(external_pull_request['pull_request_iid']) + expect(external_pr.source_branch).to eq(external_pull_request['source_branch']) + expect(external_pr.target_branch).to eq(external_pull_request['target_branch']) + expect(external_pr.status).to eq(external_pull_request['status']) + expect(external_pr.created_at).to eq(external_pull_request['created_at']) + expect(external_pr.updated_at).to eq(external_pull_request['updated_at']) + end + + context 'when status is closed' do + let(:attributes) { { 'status' => 'closed' } } + + it 'imports closed external pull request' do + expect(external_pr.status).to eq(attributes['status']) + end + end + + context 'when from fork' do + let(:attributes) { { 'source_repository' => 'source' } } + + it 'does not create external pull request' do + expect(external_pr).to be_nil + end + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb new file mode 100644 index 00000000000..3f02356b41e --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/merge_requests_pipeline_spec.rb @@ -0,0 +1,297 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::MergeRequestsPipeline do + let_it_be(:user) { create(:user) } + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, :repository, group: group) } + let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:entity) do + create( + :bulk_import_entity, + :project_entity, + project: project, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Project', + destination_namespace: group.full_path + ) + end + + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + + let(:mr) do + { + 'iid' => 7, + 'author_id' => 22, + 'source_project_id' => 1234, + 
'target_project_id' => 1234, + 'title' => 'Imported MR', + 'description' => 'Description', + 'state' => 'opened', + 'source_branch' => 'feature', + 'target_branch' => 'main', + 'source_branch_sha' => 'ABCD', + 'target_branch_sha' => 'DCBA', + 'created_at' => '2020-06-14T15:02:47.967Z', + 'updated_at' => '2020-06-14T15:03:47.967Z', + 'merge_request_diff' => { + 'state' => 'collected', + 'base_commit_sha' => 'ae73cb07c9eeaf35924a10f713b364d32b2dd34f', + 'head_commit_sha' => 'a97f74ddaa848b707bea65441c903ae4bf5d844d', + 'start_commit_sha' => '9eea46b5c72ead701c22f516474b95049c9d9462', + 'merge_request_diff_commits' => [ + { + 'sha' => 'COMMIT1', + 'relative_order' => 0, + 'message' => 'commit message', + 'authored_date' => '2014-08-06T08:35:52.000+02:00', + 'committed_date' => '2014-08-06T08:35:52.000+02:00', + 'commit_author' => { + 'name' => 'Commit Author', + 'email' => 'gitlab@example.com' + }, + 'committer' => { + 'name' => 'Committer', + 'email' => 'committer@example.com' + } + } + ], + 'merge_request_diff_files' => [ + { + 'relative_order' => 0, + 'utf8_diff' => '--- a/.gitignore\n+++ b/.gitignore\n@@ -1 +1 @@ test\n', + 'new_path' => '.gitignore', + 'old_path' => '.gitignore', + 'a_mode' => '100644', + 'b_mode' => '100644', + 'new_file' => false, + 'renamed_file' => false, + 'deleted_file' => false, + 'too_large' => false + } + ] + } + }.merge(attributes) + end + + let(:attributes) { {} } + let(:imported_mr) { project.merge_requests.find_by_title(mr['title']) } + + subject(:pipeline) { described_class.new(context) } + + describe '#run' do + before do + group.add_owner(user) + + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:remove_tmp_dir) + allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [[mr, 0]])) + end + + allow(project.repository).to receive(:fetch_source_branch!).and_return(true) + allow(project.repository).to 
receive(:branch_exists?).and_return(false) + allow(project.repository).to receive(:create_branch) + + pipeline.run + end + + it 'imports a merge request' do + expect(project.merge_requests.count).to eq(1) + expect(imported_mr.title).to eq(mr['title']) + expect(imported_mr.description).to eq(mr['description']) + expect(imported_mr.state).to eq(mr['state']) + expect(imported_mr.iid).to eq(mr['iid']) + expect(imported_mr.created_at).to eq(mr['created_at']) + expect(imported_mr.updated_at).to eq(mr['updated_at']) + expect(imported_mr.author).to eq(user) + end + + context 'merge request state' do + context 'when mr is closed' do + let(:attributes) { { 'state' => 'closed' } } + + it 'imported mr as closed' do + expect(imported_mr.state).to eq(attributes['state']) + end + end + + context 'when mr is merged' do + let(:attributes) { { 'state' => 'merged' } } + + it 'imported mr as merged' do + expect(imported_mr.state).to eq(attributes['state']) + end + end + end + + context 'source & target project' do + it 'has the new project as target' do + expect(imported_mr.target_project).to eq(project) + end + + it 'has the new project as source' do + expect(imported_mr.source_project).to eq(project) + end + + context 'when source/target projects differ' do + let(:attributes) { { 'source_project_id' => 4321 } } + + it 'has no source' do + expect(imported_mr.source_project).to be_nil + end + + context 'when diff_head_sha is present' do + let(:attributes) { { 'diff_head_sha' => 'HEAD', 'source_project_id' => 4321 } } + + it 'has the new project as source' do + expect(imported_mr.source_project).to eq(project) + end + end + end + end + + context 'resource label events' do + let(:attributes) { { 'resource_label_events' => [{ 'action' => 'add', 'user_id' => 1 }] } } + + it 'restores resource label events' do + expect(imported_mr.resource_label_events.first.action).to eq('add') + end + end + + context 'award emoji' do + let(:attributes) { { 'award_emoji' => [{ 'name' => 'tada', 'user_id' 
=> 22 }] } } + + it 'has award emoji' do + expect(imported_mr.award_emoji.first.name).to eq(attributes['award_emoji'].first['name']) + end + end + + context 'notes' do + let(:note) { imported_mr.notes.first } + let(:attributes) do + { + 'notes' => [ + { + 'note' => 'Issue note', + 'note_html' => '<p>something else entirely</p>', + 'cached_markdown_version' => 917504, + 'author_id' => 22, + 'author' => { 'name' => 'User 22' }, + 'created_at' => '2016-06-14T15:02:56.632Z', + 'updated_at' => '2016-06-14T15:02:47.770Z', + 'award_emoji' => [{ 'name' => 'clapper', 'user_id' => 22 }] + } + ] + } + end + + it 'imports mr note' do + expect(note).to be_present + expect(note.note).to include('By User 22') + expect(note.note).to include(attributes['notes'].first['note']) + expect(note.author).to eq(user) + end + + it 'has award emoji' do + emoji = note.award_emoji.first + + expect(emoji.name).to eq('clapper') + expect(emoji.user).to eq(user) + end + + it 'does not import note_html' do + expect(note.note_html).to match(attributes['notes'].first['note']) + expect(note.note_html).not_to match(attributes['notes'].first['note_html']) + end + end + + context 'system note metadata' do + let(:attributes) do + { + 'notes' => [ + { + 'note' => 'added 3 commits', + 'system' => true, + 'author_id' => 22, + 'author' => { 'name' => 'User 22' }, + 'created_at' => '2016-06-14T15:02:56.632Z', + 'updated_at' => '2016-06-14T15:02:47.770Z', + 'system_note_metadata' => { 'action' => 'commit', 'commit_count' => 3 } + } + ] + } + end + + it 'restores system note metadata' do + note = imported_mr.notes.first + + expect(note.system).to eq(true) + expect(note.noteable_type).to eq('MergeRequest') + expect(note.system_note_metadata.action).to eq('commit') + expect(note.system_note_metadata.commit_count).to eq(3) + end + end + + context 'diffs' do + it 'imports merge request diff' do + expect(imported_mr.merge_request_diff).to be_present + end + + it 'has the correct data for merge request 
latest_merge_request_diff' do + expect(imported_mr.latest_merge_request_diff_id).to eq(imported_mr.merge_request_diffs.maximum(:id)) + end + + it 'imports diff files' do + expect(imported_mr.merge_request_diff.merge_request_diff_files.count).to eq(1) + end + + context 'diff commits' do + it 'imports diff commits' do + expect(imported_mr.merge_request_diff.merge_request_diff_commits.count).to eq(1) + end + + it 'assigns committer and author details to diff commits' do + commit = imported_mr.merge_request_diff.merge_request_diff_commits.first + + expect(commit.commit_author_id).not_to be_nil + expect(commit.committer_id).not_to be_nil + end + + it 'assigns the correct commit users to diff commits' do + commit = MergeRequestDiffCommit.find_by(sha: 'COMMIT1') + + expect(commit.commit_author.name).to eq('Commit Author') + expect(commit.commit_author.email).to eq('gitlab@example.com') + expect(commit.committer.name).to eq('Committer') + expect(commit.committer.email).to eq('committer@example.com') + end + end + end + + context 'labels' do + let(:attributes) do + { + 'label_links' => [ + { 'label' => { 'title' => 'imported label 1', 'type' => 'ProjectLabel' } }, + { 'label' => { 'title' => 'imported label 2', 'type' => 'ProjectLabel' } } + ] + } + end + + it 'imports labels' do + expect(imported_mr.labels.pluck(:title)).to contain_exactly('imported label 1', 'imported label 2') + end + end + + context 'milestone' do + let(:attributes) { { 'milestone' => { 'title' => 'imported milestone' } } } + + it 'imports milestone' do + expect(imported_mr.milestone.title).to eq(attributes.dig('milestone', 'title')) + end + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/protected_branches_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/protected_branches_pipeline_spec.rb new file mode 100644 index 00000000000..7de2e266192 --- /dev/null +++ b/spec/lib/bulk_imports/projects/pipelines/protected_branches_pipeline_spec.rb @@ -0,0 +1,61 @@ +# 
frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe BulkImports::Projects::Pipelines::ProtectedBranchesPipeline do + let_it_be(:user) { create(:user) } + let_it_be(:project) { create(:project) } + let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) } + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + let_it_be(:protected_branch) do + { + 'name' => 'main', + 'created_at' => '2016-06-14T15:02:47.967Z', + 'updated_at' => '2016-06-14T15:02:47.967Z', + 'merge_access_levels' => [ + { + 'access_level' => 40, + 'created_at' => '2016-06-15T15:02:47.967Z', + 'updated_at' => '2016-06-15T15:02:47.967Z' + } + ], + 'push_access_levels' => [ + { + 'access_level' => 30, + 'created_at' => '2016-06-16T15:02:47.967Z', + 'updated_at' => '2016-06-16T15:02:47.967Z' + } + ] + } + end + + subject(:pipeline) { described_class.new(context) } + + describe '#run' do + it 'imports protected branch information' do + allow_next_instance_of(BulkImports::Common::Extractors::NdjsonExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: [protected_branch, 0])) + end + + pipeline.run + + imported_protected_branch = project.protected_branches.last + merge_access_level = imported_protected_branch.merge_access_levels.first + push_access_level = imported_protected_branch.push_access_levels.first + + aggregate_failures do + expect(imported_protected_branch.name).to eq(protected_branch['name']) + expect(imported_protected_branch.updated_at).to eq(protected_branch['updated_at']) + expect(imported_protected_branch.created_at).to eq(protected_branch['created_at']) + expect(merge_access_level.access_level).to eq(protected_branch['merge_access_levels'].first['access_level']) + expect(merge_access_level.created_at).to 
eq(protected_branch['merge_access_levels'].first['created_at']) + expect(merge_access_level.updated_at).to eq(protected_branch['merge_access_levels'].first['updated_at']) + expect(push_access_level.access_level).to eq(protected_branch['push_access_levels'].first['access_level']) + expect(push_access_level.created_at).to eq(protected_branch['push_access_levels'].first['created_at']) + expect(push_access_level.updated_at).to eq(protected_branch['push_access_levels'].first['updated_at']) + end + end + end +end diff --git a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb index af39ec7a11c..583485faf8d 100644 --- a/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb +++ b/spec/lib/bulk_imports/projects/pipelines/repository_pipeline_spec.rb @@ -3,71 +3,72 @@ require 'spec_helper' RSpec.describe BulkImports::Projects::Pipelines::RepositoryPipeline do - describe '#run' do - let_it_be(:user) { create(:user) } - let_it_be(:parent) { create(:project) } - let_it_be(:bulk_import) { create(:bulk_import, user: user) } - let_it_be(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) } - - let_it_be(:entity) do - create( - :bulk_import_entity, - :project_entity, - bulk_import: bulk_import, - source_full_path: 'source/full/path', - destination_name: 'My Destination Repository', - destination_namespace: parent.full_path, - project: parent - ) - end + let_it_be(:user) { create(:user) } + let_it_be(:parent) { create(:project) } + let_it_be(:bulk_import) { create(:bulk_import, user: user) } + let_it_be(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) } + + let_it_be(:entity) do + create( + :bulk_import_entity, + :project_entity, + bulk_import: bulk_import, + source_full_path: 'source/full/path', + destination_name: 'My Destination Repository', + destination_namespace: parent.full_path, + project: 
parent + ) + end - let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } - let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } + let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) } + let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) } - context 'successfully imports repository' do - let(:project_data) do - { - 'httpUrlToRepo' => 'http://test.git' - } - end + let(:extracted_data) { BulkImports::Pipeline::ExtractedData.new(data: project_data) } - subject { described_class.new(context) } + subject(:pipeline) { described_class.new(context) } + + before do + allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor| + allow(extractor).to receive(:extract).and_return(extracted_data) + end + end + + describe '#run' do + context 'successfully imports repository' do + let(:project_data) { { 'httpUrlToRepo' => 'http://test.git' } } it 'imports new repository into destination project' do - allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor| - allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data)) - end + url = project_data['httpUrlToRepo'].sub("://", "://oauth2:#{bulk_import_configuration.access_token}@") - expect_next_instance_of(Gitlab::GitalyClient::RepositoryService) do |repository_service| - url = project_data['httpUrlToRepo'].sub("://", "://oauth2:#{bulk_import_configuration.access_token}@") - expect(repository_service).to receive(:import_repository).with(url).and_return 0 - end + expect(context.portable).to receive(:ensure_repository) + expect(context.portable.repository).to receive(:fetch_as_mirror).with(url) - subject.run + pipeline.run end end context 'blocked local networks' do - let(:project_data) do - { - 'httpUrlToRepo' => 'http://localhost/foo.git' - } - end + let(:project_data) { { 'httpUrlToRepo' => 'http://localhost/foo.git' } } - before do + it 'imports new repository into 
destination project' do allow(Gitlab.config.gitlab).to receive(:host).and_return('notlocalhost.gitlab.com') allow(Gitlab::CurrentSettings).to receive(:allow_local_requests_from_web_hooks_and_services?).and_return(false) - allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor| - allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: project_data)) - end - end - subject { described_class.new(context) } + pipeline.run - it 'imports new repository into destination project' do - subject.run - expect(context.entity.failed?).to be_truthy + expect(context.entity.failed?).to eq(true) end end end + + describe '#after_run' do + it 'executes housekeeping service after import' do + service = instance_double(Repositories::HousekeepingService) + + expect(Repositories::HousekeepingService).to receive(:new).with(context.portable, :gc).and_return(service) + expect(service).to receive(:execute) + + pipeline.after_run(context) + end + end end diff --git a/spec/lib/bulk_imports/projects/stage_spec.rb b/spec/lib/bulk_imports/projects/stage_spec.rb index c606cf7c556..e7670085f60 100644 --- a/spec/lib/bulk_imports/projects/stage_spec.rb +++ b/spec/lib/bulk_imports/projects/stage_spec.rb @@ -8,9 +8,15 @@ RSpec.describe BulkImports::Projects::Stage do [0, BulkImports::Projects::Pipelines::ProjectPipeline], [1, BulkImports::Projects::Pipelines::RepositoryPipeline], [2, BulkImports::Common::Pipelines::LabelsPipeline], + [2, BulkImports::Common::Pipelines::MilestonesPipeline], [3, BulkImports::Projects::Pipelines::IssuesPipeline], [4, BulkImports::Common::Pipelines::BoardsPipeline], - [5, BulkImports::Common::Pipelines::EntityFinisher] + [4, BulkImports::Projects::Pipelines::MergeRequestsPipeline], + [4, BulkImports::Projects::Pipelines::ExternalPullRequestsPipeline], + [4, BulkImports::Projects::Pipelines::ProtectedBranchesPipeline], + [5, BulkImports::Common::Pipelines::WikiPipeline], + [5, 
BulkImports::Common::Pipelines::UploadsPipeline], + [6, BulkImports::Common::Pipelines::EntityFinisher] ] end @@ -22,7 +28,8 @@ RSpec.describe BulkImports::Projects::Stage do describe '#pipelines' do it 'list all the pipelines with their stage number, ordered by stage' do - expect(subject.pipelines).to eq(pipelines) + expect(subject.pipelines & pipelines).to contain_exactly(*pipelines) + expect(subject.pipelines.last.last).to eq(BulkImports::Common::Pipelines::EntityFinisher) end end end diff --git a/spec/lib/container_registry/client_spec.rb b/spec/lib/container_registry/client_spec.rb index 47a8fcf5dd0..259d7d5ad13 100644 --- a/spec/lib/container_registry/client_spec.rb +++ b/spec/lib/container_registry/client_spec.rb @@ -279,7 +279,7 @@ RSpec.describe ContainerRegistry::Client do it 'uploads the manifest and returns the digest' do stub_request(:put, "http://container-registry/v2/path/manifests/tagA") .with(body: "{\n \"foo\": \"bar\"\n}", headers: manifest_headers) - .to_return(status: 200, body: "", headers: { 'docker-content-digest' => 'sha256:123' }) + .to_return(status: 200, body: "", headers: { DependencyProxy::Manifest::DIGEST_HEADER => 'sha256:123' }) expect_new_faraday(timeout: false) diff --git a/spec/lib/container_registry/tag_spec.rb b/spec/lib/container_registry/tag_spec.rb index d6e6b254dd9..9b931ab6dbc 100644 --- a/spec/lib/container_registry/tag_spec.rb +++ b/spec/lib/container_registry/tag_spec.rb @@ -213,7 +213,7 @@ RSpec.describe ContainerRegistry::Tag do before do stub_request(:head, 'http://registry.gitlab/v2/group/test/manifests/tag') .with(headers: headers) - .to_return(status: 200, headers: { 'Docker-Content-Digest' => 'sha256:digest' }) + .to_return(status: 200, headers: { DependencyProxy::Manifest::DIGEST_HEADER => 'sha256:digest' }) end describe '#digest' do diff --git a/spec/lib/error_tracking/collector/payload_validator_spec.rb b/spec/lib/error_tracking/collector/payload_validator_spec.rb new file mode 100644 index 
00000000000..852cf9eac6c --- /dev/null +++ b/spec/lib/error_tracking/collector/payload_validator_spec.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ErrorTracking::Collector::PayloadValidator do + describe '#valid?' do + RSpec.shared_examples 'valid payload' do + it 'returns true' do + expect(described_class.new.valid?(payload)).to be_truthy + end + end + + RSpec.shared_examples 'invalid payload' do + it 'returns false' do + expect(described_class.new.valid?(payload)).to be_falsey + end + end + + context 'ruby payload' do + let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/parsed_event.json')) } + + it_behaves_like 'valid payload' + end + + context 'python payload' do + let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/python_event.json')) } + + it_behaves_like 'valid payload' + end + + context 'browser payload' do + let(:payload) { Gitlab::Json.parse(fixture_file('error_tracking/browser_event.json')) } + + it_behaves_like 'valid payload' + end + + context 'empty payload' do + let(:payload) { '' } + + it_behaves_like 'invalid payload' + end + + context 'invalid payload' do + let(:payload) { { 'foo' => 'bar' } } + + it_behaves_like 'invalid payload' + end + end +end diff --git a/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb b/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb index 6f12c6d25e0..06f4b64ce93 100644 --- a/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb +++ b/spec/lib/error_tracking/collector/sentry_request_parser_spec.rb @@ -33,12 +33,5 @@ RSpec.describe ErrorTracking::Collector::SentryRequestParser do context 'plain text sentry request' do it_behaves_like 'valid parser' end - - context 'gzip encoded sentry request' do - let(:headers) { { 'Content-Encoding' => 'gzip' } } - let(:body) { Zlib.gzip(raw_event) } - - it_behaves_like 'valid parser' - end end end diff --git a/spec/lib/feature/gitaly_spec.rb 
b/spec/lib/feature/gitaly_spec.rb index 311589c3253..ed80e31e3cd 100644 --- a/spec/lib/feature/gitaly_spec.rb +++ b/spec/lib/feature/gitaly_spec.rb @@ -78,7 +78,9 @@ RSpec.describe Feature::Gitaly do context 'when table does not exist' do before do - allow(::Gitlab::Database.main).to receive(:cached_table_exists?).and_return(false) + allow(Feature::FlipperFeature.database) + .to receive(:cached_table_exists?) + .and_return(false) end it 'returns an empty Hash' do diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb index 9d4820f9a4c..58e7292c125 100644 --- a/spec/lib/feature_spec.rb +++ b/spec/lib/feature_spec.rb @@ -102,12 +102,14 @@ RSpec.describe Feature, stub_feature_flags: false do describe '.flipper' do context 'when request store is inactive' do - it 'memoizes the Flipper instance' do + it 'memoizes the Flipper instance but does not not enable Flipper memoization' do expect(Flipper).to receive(:new).once.and_call_original 2.times do - described_class.send(:flipper) + described_class.flipper end + + expect(described_class.flipper.adapter.memoizing?).to eq(false) end end @@ -115,9 +117,11 @@ RSpec.describe Feature, stub_feature_flags: false do it 'memoizes the Flipper instance' do expect(Flipper).to receive(:new).once.and_call_original - described_class.send(:flipper) + described_class.flipper described_class.instance_variable_set(:@flipper, nil) - described_class.send(:flipper) + described_class.flipper + + expect(described_class.flipper.adapter.memoizing?).to eq(true) end end end @@ -310,7 +314,7 @@ RSpec.describe Feature, stub_feature_flags: false do context 'when database exists' do before do - allow(Gitlab::Database.main).to receive(:exists?).and_return(true) + allow(ApplicationRecord.database).to receive(:exists?).and_return(true) end it 'checks the persisted status and returns false' do @@ -322,7 +326,7 @@ RSpec.describe Feature, stub_feature_flags: false do context 'when database does not exist' do before do - 
allow(Gitlab::Database.main).to receive(:exists?).and_return(false) + allow(ApplicationRecord.database).to receive(:exists?).and_return(false) end it 'returns false without checking the status in the database' do diff --git a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb index 05833cf4ec4..b67425ae012 100644 --- a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb +++ b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb @@ -99,4 +99,15 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator, :silence_stdout do expect(YAML.safe_load(File.read(metric_definition_path))).to include("name" => "some name") end end + + context 'with multiple file names' do + let(:key_paths) { ['counts_weekly.test_metric', 'counts_weekly.test1_metric'] } + + it 'creates multiple files' do + described_class.new(key_paths, { 'dir' => dir }).invoke_all + files = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_metric.yml')) + + expect(files.count).to eq(2) + end + end end diff --git a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder_spec.rb new file mode 100644 index 00000000000..bf2f8d8159b --- /dev/null +++ b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/base_query_builder_spec.rb @@ -0,0 +1,150 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::BaseQueryBuilder do + let_it_be(:group) { create(:group) } + let_it_be(:project) { create(:project, group: group) } + let_it_be(:milestone) { create(:milestone, project: project) } + let_it_be(:user_1) { create(:user) } + + let_it_be(:label_1) { create(:label, project: project) } + let_it_be(:label_2) { create(:label, project: project) } + + let_it_be(:issue_1) { create(:issue, project: project, author: project.creator, labels: [label_1, 
label_2]) } + let_it_be(:issue_2) { create(:issue, project: project, milestone: milestone, assignees: [user_1]) } + let_it_be(:issue_3) { create(:issue, project: project) } + let_it_be(:issue_outside_project) { create(:issue) } + + let_it_be(:stage) do + create(:cycle_analytics_project_stage, + project: project, + start_event_identifier: :issue_created, + end_event_identifier: :issue_deployed_to_production + ) + end + + let_it_be(:stage_event_1) do + create(:cycle_analytics_issue_stage_event, + stage_event_hash_id: stage.stage_event_hash_id, + group_id: group.id, + project_id: project.id, + issue_id: issue_1.id, + author_id: project.creator.id, + milestone_id: nil, + state_id: issue_1.state_id, + end_event_timestamp: 8.months.ago + ) + end + + let_it_be(:stage_event_2) do + create(:cycle_analytics_issue_stage_event, + stage_event_hash_id: stage.stage_event_hash_id, + group_id: group.id, + project_id: project.id, + issue_id: issue_2.id, + author_id: nil, + milestone_id: milestone.id, + state_id: issue_2.state_id + ) + end + + let_it_be(:stage_event_3) do + create(:cycle_analytics_issue_stage_event, + stage_event_hash_id: stage.stage_event_hash_id, + group_id: group.id, + project_id: project.id, + issue_id: issue_3.id, + author_id: nil, + milestone_id: milestone.id, + state_id: issue_3.state_id, + start_event_timestamp: 8.months.ago, + end_event_timestamp: nil + ) + end + + let(:params) do + { + from: 1.year.ago.to_date, + to: Date.today + } + end + + subject(:issue_ids) { described_class.new(stage: stage, params: params).build.pluck(:issue_id) } + + it 'scopes the query for the given project' do + expect(issue_ids).to match_array([issue_1.id, issue_2.id]) + expect(issue_ids).not_to include([issue_outside_project.id]) + end + + describe 'author_username param' do + it 'returns stage events associated with the given author' do + params[:author_username] = project.creator.username + + expect(issue_ids).to eq([issue_1.id]) + end + + it 'returns empty result when unknown 
author is given' do + params[:author_username] = 'no one' + + expect(issue_ids).to be_empty + end + end + + describe 'milestone_title param' do + it 'returns stage events associated with the milestone' do + params[:milestone_title] = milestone.title + + expect(issue_ids).to eq([issue_2.id]) + end + + it 'returns empty result when unknown milestone is given' do + params[:milestone_title] = 'unknown milestone' + + expect(issue_ids).to be_empty + end + end + + describe 'label_name param' do + it 'returns stage events associated with multiple labels' do + params[:label_name] = [label_1.name, label_2.name] + + expect(issue_ids).to eq([issue_1.id]) + end + + it 'does not include records with partial label match' do + params[:label_name] = [label_1.name, 'other label'] + + expect(issue_ids).to be_empty + end + end + + describe 'assignee_username param' do + it 'returns stage events associated assignee' do + params[:assignee_username] = [user_1.username] + + expect(issue_ids).to eq([issue_2.id]) + end + end + + describe 'timestamp filtering' do + before do + params[:from] = 1.year.ago + params[:to] = 6.months.ago + end + + it 'filters by the end event time range' do + expect(issue_ids).to eq([issue_1.id]) + end + + context 'when in_progress items are requested' do + before do + params[:end_event_filter] = :in_progress + end + + it 'filters by the start event time range' do + expect(issue_ids).to eq([issue_3.id]) + end + end + end +end diff --git a/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb new file mode 100644 index 00000000000..045cdb129cb --- /dev/null +++ b/spec/lib/gitlab/analytics/cycle_analytics/aggregated/records_fetcher_spec.rb @@ -0,0 +1,130 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher do + let_it_be(:project) { create(:project) } + let_it_be(:issue_1) { 
create(:issue, project: project) } + let_it_be(:issue_2) { create(:issue, project: project) } + let_it_be(:issue_3) { create(:issue, project: project) } + + let_it_be(:stage_event_1) { create(:cycle_analytics_issue_stage_event, issue_id: issue_1.id, start_event_timestamp: 2.years.ago, end_event_timestamp: 1.year.ago) } # duration: 1 year + let_it_be(:stage_event_2) { create(:cycle_analytics_issue_stage_event, issue_id: issue_2.id, start_event_timestamp: 5.years.ago, end_event_timestamp: 2.years.ago) } # duration: 3 years + let_it_be(:stage_event_3) { create(:cycle_analytics_issue_stage_event, issue_id: issue_3.id, start_event_timestamp: 6.years.ago, end_event_timestamp: 3.months.ago) } # duration: 5+ years + + let_it_be(:stage) { create(:cycle_analytics_project_stage, start_event_identifier: :issue_created, end_event_identifier: :issue_deployed_to_production, project: project) } + + let(:params) { {} } + + subject(:records_fetcher) do + described_class.new(stage: stage, query: Analytics::CycleAnalytics::IssueStageEvent.all, params: params) + end + + shared_examples 'match returned records' do + it 'returns issues in the correct order' do + returned_iids = records_fetcher.serialized_records.pluck(:iid).map(&:to_i) + + expect(returned_iids).to eq(expected_issue_ids) + end + end + + describe '#serialized_records' do + describe 'sorting' do + context 'when sorting by end event DESC' do + let(:expected_issue_ids) { [issue_3.iid, issue_1.iid, issue_2.iid] } + + before do + params[:sort] = :end_event + params[:direction] = :desc + end + + it_behaves_like 'match returned records' + end + + context 'when sorting by end event ASC' do + let(:expected_issue_ids) { [issue_2.iid, issue_1.iid, issue_3.iid] } + + before do + params[:sort] = :end_event + params[:direction] = :asc + end + + it_behaves_like 'match returned records' + end + + context 'when sorting by duration DESC' do + let(:expected_issue_ids) { [issue_3.iid, issue_2.iid, issue_1.iid] } + + before do + params[:sort] 
= :duration + params[:direction] = :desc + end + + it_behaves_like 'match returned records' + end + + context 'when sorting by duration ASC' do + let(:expected_issue_ids) { [issue_1.iid, issue_2.iid, issue_3.iid] } + + before do + params[:sort] = :duration + params[:direction] = :asc + end + + it_behaves_like 'match returned records' + end + end + + describe 'pagination' do + let(:expected_issue_ids) { [issue_3.iid] } + + before do + params[:sort] = :duration + params[:direction] = :asc + params[:page] = 2 + + stub_const('Gitlab::Analytics::CycleAnalytics::Aggregated::RecordsFetcher::MAX_RECORDS', 2) + end + + it_behaves_like 'match returned records' + end + + context 'when passing a block to serialized_records method' do + before do + params[:sort] = :duration + params[:direction] = :asc + end + + it 'yields the underlying stage event scope' do + stage_event_records = [] + + records_fetcher.serialized_records do |scope| + stage_event_records.concat(scope.to_a) + end + + expect(stage_event_records.map(&:issue_id)).to eq([issue_1.id, issue_2.id, issue_3.id]) + end + end + + context 'when the issue record no longer exists' do + it 'skips non-existing issue records' do + create(:cycle_analytics_issue_stage_event, { + issue_id: 0, # non-existing id + start_event_timestamp: 5.months.ago, + end_event_timestamp: 3.months.ago + }) + + stage_event_count = nil + + records_fetcher.serialized_records do |scope| + stage_event_count = scope.to_a.size + end + + issue_count = records_fetcher.serialized_records.to_a.size + + expect(stage_event_count).to eq(4) + expect(issue_count).to eq(3) + end + end + end +end diff --git a/spec/lib/gitlab/application_rate_limiter_spec.rb b/spec/lib/gitlab/application_rate_limiter_spec.rb index 0fb99688d27..c74bcf8d678 100644 --- a/spec/lib/gitlab/application_rate_limiter_spec.rb +++ b/spec/lib/gitlab/application_rate_limiter_spec.rb @@ -3,76 +3,108 @@ require 'spec_helper' RSpec.describe Gitlab::ApplicationRateLimiter do - let(:redis) { 
double('redis') } - let(:user) { create(:user) } - let(:project) { create(:project) } - let(:rate_limits) do - { - test_action: { - threshold: 1, - interval: 2.minutes + let_it_be(:user) { create(:user) } + let_it_be(:project) { create(:project) } + + subject { described_class } + + describe '.throttled?', :clean_gitlab_redis_rate_limiting do + let(:rate_limits) do + { + test_action: { + threshold: 1, + interval: 2.minutes + }, + another_action: { + threshold: 2, + interval: 3.minutes + } } - } - end + end - let(:key) { rate_limits.keys[0] } + before do + allow(described_class).to receive(:rate_limits).and_return(rate_limits) + end - subject { described_class } + context 'when the key is invalid' do + context 'is provided as a Symbol' do + context 'but is not defined in the rate_limits Hash' do + it 'raises an InvalidKeyError exception' do + key = :key_not_in_rate_limits_hash - before do - allow(Gitlab::Redis::RateLimiting).to receive(:with).and_yield(redis) - allow(described_class).to receive(:rate_limits).and_return(rate_limits) - end + expect { subject.throttled?(key) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError) + end + end + end - shared_examples 'action rate limiter' do - it 'increases the throttle count and sets the expiration time' do - expect(redis).to receive(:incr).with(cache_key).and_return(1) - expect(redis).to receive(:expire).with(cache_key, 120) + context 'is provided as a String' do + context 'and is a String representation of an existing key in rate_limits Hash' do + it 'raises an InvalidKeyError exception' do + key = rate_limits.keys[0].to_s - expect(subject.throttled?(key, scope: scope)).to be_falsy - end + expect { subject.throttled?(key) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError) + end + end - it 'returns true if the key is throttled' do - expect(redis).to receive(:incr).with(cache_key).and_return(2) - expect(redis).not_to receive(:expire) + context 'but is not defined in any form in the rate_limits 
Hash' do + it 'raises an InvalidKeyError exception' do + key = 'key_not_in_rate_limits_hash' - expect(subject.throttled?(key, scope: scope)).to be_truthy + expect { subject.throttled?(key) }.to raise_error(Gitlab::ApplicationRateLimiter::InvalidKeyError) + end + end + end end - context 'when throttling is disabled' do - it 'returns false and does not set expiration time' do - expect(redis).not_to receive(:incr) - expect(redis).not_to receive(:expire) + shared_examples 'throttles based on key and scope' do + let(:start_time) { Time.current.beginning_of_hour } - expect(subject.throttled?(key, scope: scope, threshold: 0)).to be_falsy + it 'returns true when threshold is exceeded' do + travel_to(start_time) do + expect(subject.throttled?(:test_action, scope: scope)).to eq(false) + end + + travel_to(start_time + 1.minute) do + expect(subject.throttled?(:test_action, scope: scope)).to eq(true) + + # Assert that it does not affect other actions or scope + expect(subject.throttled?(:another_action, scope: scope)).to eq(false) + expect(subject.throttled?(:test_action, scope: [user])).to eq(false) + end end - end - end - context 'when the key is an array of only ActiveRecord models' do - let(:scope) { [user, project] } + it 'returns false when interval has elapsed' do + travel_to(start_time) do + expect(subject.throttled?(:test_action, scope: scope)).to eq(false) - let(:cache_key) do - "application_rate_limiter:test_action:user:#{user.id}:project:#{project.id}" - end + # another_action has a threshold of 3 so we simulate 2 requests + expect(subject.throttled?(:another_action, scope: scope)).to eq(false) + expect(subject.throttled?(:another_action, scope: scope)).to eq(false) + end - it_behaves_like 'action rate limiter' - end + travel_to(start_time + 2.minutes) do + expect(subject.throttled?(:test_action, scope: scope)).to eq(false) - context 'when they key a combination of ActiveRecord models and strings' do - let(:project) { create(:project, :public, :repository) } - 
let(:commit) { project.repository.commit } - let(:path) { 'app/controllers/groups_controller.rb' } - let(:scope) { [project, commit, path] } + # Assert that another_action has its own interval that hasn't elapsed + expect(subject.throttled?(:another_action, scope: scope)).to eq(true) + end + end + end + + context 'when using ActiveRecord models as scope' do + let(:scope) { [user, project] } - let(:cache_key) do - "application_rate_limiter:test_action:project:#{project.id}:commit:#{commit.sha}:#{path}" + it_behaves_like 'throttles based on key and scope' end - it_behaves_like 'action rate limiter' + context 'when using ActiveRecord models and strings as scope' do + let(:scope) { [project, 'app/controllers/groups_controller.rb'] } + + it_behaves_like 'throttles based on key and scope' + end end - describe '#log_request' do + describe '.log_request' do let(:file_path) { 'master/README.md' } let(:type) { :raw_blob_request_limit } let(:fullpath) { "/#{project.full_path}/raw/#{file_path}" } @@ -102,7 +134,7 @@ RSpec.describe Gitlab::ApplicationRateLimiter do end context 'with a current_user' do - let(:current_user) { create(:user) } + let(:current_user) { user } let(:attributes) do base_attributes.merge({ diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb index f3799c58fed..ac29bb22865 100644 --- a/spec/lib/gitlab/asciidoc_spec.rb +++ b/spec/lib/gitlab/asciidoc_spec.rb @@ -11,27 +11,13 @@ module Gitlab allow_any_instance_of(ApplicationSetting).to receive(:current).and_return(::ApplicationSetting.create_from_defaults) end - context "without project" do - let(:input) { '<b>ascii</b>' } - let(:context) { {} } - let(:html) { 'H<sub>2</sub>O' } - - it "converts the input using Asciidoctor and default options" do - expected_asciidoc_opts = { - safe: :secure, - backend: :gitlab_html5, - attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil }), - extensions: be_a(Proc) - } - - expect(Asciidoctor).to receive(:convert) - 
.with(input, expected_asciidoc_opts).and_return(html) - - expect(render(input, context)).to eq(html) - end + shared_examples_for 'renders correct asciidoc' do + context "without project" do + let(:input) { '<b>ascii</b>' } + let(:context) { {} } + let(:html) { 'H<sub>2</sub>O' } - context "with asciidoc_opts" do - it "merges the options with default ones" do + it "converts the input using Asciidoctor and default options" do expected_asciidoc_opts = { safe: :secure, backend: :gitlab_html5, @@ -42,796 +28,839 @@ module Gitlab expect(Asciidoctor).to receive(:convert) .with(input, expected_asciidoc_opts).and_return(html) - render(input, context) + expect(render(input, context)).to eq(html) end - end - context "with requested path" do - input = <<~ADOC - Document name: {docname}. - ADOC - - it "ignores {docname} when not available" do - expect(render(input, {})).to include(input.strip) - end - - [ - ['/', '', 'root'], - ['README', 'README', 'just a filename'], - ['doc/api/', '', 'a directory'], - ['doc/api/README.adoc', 'README', 'a complete path'] - ].each do |path, basename, desc| - it "sets {docname} for #{desc}" do - expect(render(input, { requested_path: path })).to include(": #{basename}.") - end - end - end + context "with asciidoc_opts" do + it "merges the options with default ones" do + expected_asciidoc_opts = { + safe: :secure, + backend: :gitlab_html5, + attributes: described_class::DEFAULT_ADOC_ATTRS.merge({ "kroki-server-url" => nil }), + extensions: be_a(Proc) + } - context "XSS" do - items = { - 'link with extra attribute' => { - input: 'link:mylink"onmouseover="alert(1)[Click Here]', - output: "<div>\n<p><a href=\"mylink\">Click Here</a></p>\n</div>" - }, - 'link with unsafe scheme' => { - input: 'link:data://danger[Click Here]', - output: "<div>\n<p><a>Click Here</a></p>\n</div>" - }, - 'image with onerror' => { - input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]', - output: "<div>\n<p><span><a class=\"no-attachment-icon\" 
href=\"https://localhost.com/image.png\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt='Alt text\" onerror=\"alert(7)' class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>" - }, - 'fenced code with inline script' => { - input: '```mypre"><script>alert(3)</script>', - output: "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"></span></code></pre>\n</div>\n</div>" - } - } + expect(Asciidoctor).to receive(:convert) + .with(input, expected_asciidoc_opts).and_return(html) - items.each do |name, data| - it "does not convert dangerous #{name} into HTML" do - expect(render(data[:input], context)).to include(data[:output]) + render(input, context) end end - it 'does not allow locked attributes to be overridden' do + context "with requested path" do input = <<~ADOC - {counter:max-include-depth:1234} - <|-- {max-include-depth} + Document name: {docname}. 
ADOC - expect(render(input, {})).not_to include('1234') - end - end + it "ignores {docname} when not available" do + expect(render(input, {})).to include(input.strip) + end - context "images" do - it "does lazy load and link image" do - input = 'image:https://localhost.com/image.png[]' - output = "<div>\n<p><span><a class=\"no-attachment-icon\" href=\"https://localhost.com/image.png\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"image\" class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>" - expect(render(input, context)).to include(output) + [ + ['/', '', 'root'], + ['README', 'README', 'just a filename'], + ['doc/api/', '', 'a directory'], + ['doc/api/README.adoc', 'README', 'a complete path'] + ].each do |path, basename, desc| + it "sets {docname} for #{desc}" do + expect(render(input, { requested_path: path })).to include(": #{basename}.") + end + end end - it "does not automatically link image if link is explicitly defined" do - input = 'image:https://localhost.com/image.png[link=https://gitlab.com]' - output = "<div>\n<p><span><a href=\"https://gitlab.com\" rel=\"nofollow noreferrer noopener\" target=\"_blank\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"image\" class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>" - expect(render(input, context)).to include(output) - end - end + context "XSS" do + items = { + 'link with extra attribute' => { + input: 'link:mylink"onmouseover="alert(1)[Click Here]', + output: "<div>\n<p><a href=\"mylink\">Click Here</a></p>\n</div>" + }, + 'link with unsafe scheme' => { + input: 'link:data://danger[Click Here]', + output: "<div>\n<p><a>Click Here</a></p>\n</div>" + }, + 'image with onerror' => { + input: 'image:https://localhost.com/image.png[Alt text" onerror="alert(7)]', + output: "<div>\n<p><span><a 
class=\"no-attachment-icon\" href=\"https://localhost.com/image.png\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt='Alt text\" onerror=\"alert(7)' class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>" + } + } - context 'with admonition' do - it 'preserves classes' do - input = <<~ADOC - NOTE: An admonition paragraph, like this note, grabs the reader’s attention. - ADOC + items.each do |name, data| + it "does not convert dangerous #{name} into HTML" do + expect(render(data[:input], context)).to include(data[:output]) + end + end - output = <<~HTML - <div class="admonitionblock"> - <table> - <tr> - <td class="icon"> - <i class="fa icon-note" title="Note"></i> - </td> - <td> - An admonition paragraph, like this note, grabs the reader’s attention. - </td> - </tr> - </table> - </div> - HTML - - expect(render(input, context)).to include(output.strip) - end - end + # `stub_feature_flags method` runs AFTER declaration of `items` above. + # So the spec in its current implementation won't pass. + # Move this test back to the items hash when removing `use_cmark_renderer` feature flag. 
+ it "does not convert dangerous fenced code with inline script into HTML" do + input = '```mypre"><script>alert(3)</script>' + output = + if Feature.enabled?(:use_cmark_renderer) + "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code></code></pre>\n</div>\n</div>" + else + "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"></span></code></pre>\n</div>\n</div>" + end - context 'with passthrough' do - it 'removes non heading ids' do - input = <<~ADOC - ++++ - <h2 id="foo">Title</h2> - ++++ - ADOC + expect(render(input, context)).to include(output) + end - output = <<~HTML - <h2>Title</h2> - HTML + it 'does not allow locked attributes to be overridden' do + input = <<~ADOC + {counter:max-include-depth:1234} + <|-- {max-include-depth} + ADOC - expect(render(input, context)).to include(output.strip) + expect(render(input, {})).not_to include('1234') + end end - it 'removes non footnote def ids' do - input = <<~ADOC - ++++ - <div id="def">Footnote definition</div> - ++++ - ADOC - - output = <<~HTML - <div>Footnote definition</div> - HTML + context "images" do + it "does lazy load and link image" do + input = 'image:https://localhost.com/image.png[]' + output = "<div>\n<p><span><a class=\"no-attachment-icon\" href=\"https://localhost.com/image.png\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"image\" class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>" + expect(render(input, context)).to include(output) + end - expect(render(input, context)).to include(output.strip) + it "does not automatically link image if link is explicitly defined" do + input = 'image:https://localhost.com/image.png[link=https://gitlab.com]' + output = "<div>\n<p><span><a 
href=\"https://gitlab.com\" rel=\"nofollow noreferrer noopener\" target=\"_blank\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"image\" class=\"lazy\" data-src=\"https://localhost.com/image.png\"></a></span></p>\n</div>" + expect(render(input, context)).to include(output) + end end - it 'removes non footnote ref ids' do - input = <<~ADOC - ++++ - <a id="ref">Footnote reference</a> - ++++ - ADOC - - output = <<~HTML - <a>Footnote reference</a> - HTML + context 'with admonition' do + it 'preserves classes' do + input = <<~ADOC + NOTE: An admonition paragraph, like this note, grabs the reader’s attention. + ADOC - expect(render(input, context)).to include(output.strip) + output = <<~HTML + <div class="admonitionblock"> + <table> + <tr> + <td class="icon"> + <i class="fa icon-note" title="Note"></i> + </td> + <td> + An admonition paragraph, like this note, grabs the reader’s attention. + </td> + </tr> + </table> + </div> + HTML + + expect(render(input, context)).to include(output.strip) + end end - end - context 'with footnotes' do - it 'preserves ids and links' do - input = <<~ADOC - This paragraph has a footnote.footnote:[This is the text of the footnote.] - ADOC - - output = <<~HTML - <div> - <p>This paragraph has a footnote.<sup>[<a id="_footnoteref_1" href="#_footnotedef_1" title="View footnote.">1</a>]</sup></p> - </div> - <div> - <hr> - <div id="_footnotedef_1"> - <a href="#_footnoteref_1">1</a>. This is the text of the footnote. - </div> - </div> - HTML - - expect(render(input, context)).to include(output.strip) - end - end + context 'with passthrough' do + it 'removes non heading ids' do + input = <<~ADOC + ++++ + <h2 id="foo">Title</h2> + ++++ + ADOC - context 'with section anchors' do - it 'preserves ids and links' do - input = <<~ADOC - = Title + output = <<~HTML + <h2>Title</h2> + HTML - == First section + expect(render(input, context)).to include(output.strip) + end - This is the first section. 
+ it 'removes non footnote def ids' do + input = <<~ADOC + ++++ + <div id="def">Footnote definition</div> + ++++ + ADOC - == Second section + output = <<~HTML + <div>Footnote definition</div> + HTML - This is the second section. + expect(render(input, context)).to include(output.strip) + end - == Thunder ⚡ ! + it 'removes non footnote ref ids' do + input = <<~ADOC + ++++ + <a id="ref">Footnote reference</a> + ++++ + ADOC - This is the third section. - ADOC + output = <<~HTML + <a>Footnote reference</a> + HTML - output = <<~HTML - <h1>Title</h1> - <div> - <h2 id="user-content-first-section"> - <a class="anchor" href="#user-content-first-section"></a>First section</h2> - <div> - <div> - <p>This is the first section.</p> - </div> - </div> - </div> - <div> - <h2 id="user-content-second-section"> - <a class="anchor" href="#user-content-second-section"></a>Second section</h2> - <div> - <div> - <p>This is the second section.</p> - </div> - </div> - </div> - <div> - <h2 id="user-content-thunder"> - <a class="anchor" href="#user-content-thunder"></a>Thunder ⚡ !</h2> - <div> - <div> - <p>This is the third section.</p> - </div> - </div> - </div> - HTML - - expect(render(input, context)).to include(output.strip) + expect(render(input, context)).to include(output.strip) + end end - end - - context 'with xrefs' do - it 'preserves ids' do - input = <<~ADOC - Learn how to xref:cross-references[use cross references]. - [[cross-references]]A link to another location within an AsciiDoc document or between AsciiDoc documents is called a cross reference (also referred to as an xref). 
- ADOC - - output = <<~HTML - <div> - <p>Learn how to <a href="#cross-references">use cross references</a>.</p> - </div> - <div> - <p><a id="user-content-cross-references"></a>A link to another location within an AsciiDoc document or between AsciiDoc documents is called a cross reference (also referred to as an xref).</p> - </div> - HTML + context 'with footnotes' do + it 'preserves ids and links' do + input = <<~ADOC + This paragraph has a footnote.footnote:[This is the text of the footnote.] + ADOC - expect(render(input, context)).to include(output.strip) + output = <<~HTML + <div> + <p>This paragraph has a footnote.<sup>[<a id="_footnoteref_1" href="#_footnotedef_1" title="View footnote.">1</a>]</sup></p> + </div> + <div> + <hr> + <div id="_footnotedef_1"> + <a href="#_footnoteref_1">1</a>. This is the text of the footnote. + </div> + </div> + HTML + + expect(render(input, context)).to include(output.strip) + end end - end - context 'with checklist' do - it 'preserves classes' do - input = <<~ADOC - * [x] checked - * [ ] not checked - ADOC + context 'with section anchors' do + it 'preserves ids and links' do + input = <<~ADOC + = Title + + == First section + + This is the first section. + + == Second section + + This is the second section. + + == Thunder ⚡ ! + + This is the third section. 
+ ADOC - output = <<~HTML - <div> - <ul class="checklist"> - <li> - <p><i class="fa fa-check-square-o"></i> checked</p> - </li> - <li> - <p><i class="fa fa-square-o"></i> not checked</p> - </li> - </ul> - </div> - HTML - - expect(render(input, context)).to include(output.strip) + output = <<~HTML + <h1>Title</h1> + <div> + <h2 id="user-content-first-section"> + <a class="anchor" href="#user-content-first-section"></a>First section</h2> + <div> + <div> + <p>This is the first section.</p> + </div> + </div> + </div> + <div> + <h2 id="user-content-second-section"> + <a class="anchor" href="#user-content-second-section"></a>Second section</h2> + <div> + <div> + <p>This is the second section.</p> + </div> + </div> + </div> + <div> + <h2 id="user-content-thunder"> + <a class="anchor" href="#user-content-thunder"></a>Thunder ⚡ !</h2> + <div> + <div> + <p>This is the third section.</p> + </div> + </div> + </div> + HTML + + expect(render(input, context)).to include(output.strip) + end end - end - - context 'with marks' do - it 'preserves classes' do - input = <<~ADOC - Werewolves are allergic to #cassia cinnamon#. - - Did the werewolves read the [.small]#small print#? - Where did all the [.underline.small]#cores# run off to? + context 'with xrefs' do + it 'preserves ids' do + input = <<~ADOC + Learn how to xref:cross-references[use cross references]. + + [[cross-references]]A link to another location within an AsciiDoc document or between AsciiDoc documents is called a cross reference (also referred to as an xref). + ADOC - We need [.line-through]#ten# make that twenty VMs. + output = <<~HTML + <div> + <p>Learn how to <a href="#cross-references">use cross references</a>.</p> + </div> + <div> + <p><a id="user-content-cross-references"></a>A link to another location within an AsciiDoc document or between AsciiDoc documents is called a cross reference (also referred to as an xref).</p> + </div> + HTML - [.big]##O##nce upon an infinite loop. 
- ADOC - - output = <<~HTML - <div> - <p>Werewolves are allergic to <mark>cassia cinnamon</mark>.</p> - </div> - <div> - <p>Did the werewolves read the <span class="small">small print</span>?</p> - </div> - <div> - <p>Where did all the <span class="underline small">cores</span> run off to?</p> - </div> - <div> - <p>We need <span class="line-through">ten</span> make that twenty VMs.</p> - </div> - <div> - <p><span class="big">O</span>nce upon an infinite loop.</p> - </div> - HTML - - expect(render(input, context)).to include(output.strip) + expect(render(input, context)).to include(output.strip) + end end - end - context 'with fenced block' do - it 'highlights syntax' do - input = <<~ADOC - ```js - console.log('hello world') - ``` - ADOC - - output = <<~HTML - <div> - <div> - <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre> - </div> - </div> - HTML + context 'with checklist' do + it 'preserves classes' do + input = <<~ADOC + * [x] checked + * [ ] not checked + ADOC - expect(render(input, context)).to include(output.strip) + output = <<~HTML + <div> + <ul class="checklist"> + <li> + <p><i class="fa fa-check-square-o"></i> checked</p> + </li> + <li> + <p><i class="fa fa-square-o"></i> not checked</p> + </li> + </ul> + </div> + HTML + + expect(render(input, context)).to include(output.strip) + end end - end - context 'with listing block' do - it 'highlights syntax' do - input = <<~ADOC - [source,c++] - .class.cpp - ---- - #include <stdio.h> - - for (int i = 0; i < 5; i++) { - std::cout<<"*"<<std::endl; - } - ---- - ADOC + context 'with marks' do + it 'preserves classes' do + input = <<~ADOC + Werewolves are allergic to #cassia cinnamon#. 
+ + Did the werewolves read the [.small]#small print#? + + Where did all the [.underline.small]#cores# run off to? + + We need [.line-through]#ten# make that twenty VMs. + + [.big]##O##nce upon an infinite loop. + ADOC - output = <<~HTML - <div> - <div>class.cpp</div> - <div> - <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include <stdio.h></span></span> - <span id="LC2" class="line" lang="cpp"></span> - <span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o"><</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span class="p">{</span></span> - <span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o"><<</span><span class="s">"*"</span><span class="o"><<</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span> - <span id="LC5" class="line" lang="cpp"><span class="p">}</span></span></code></pre> - </div> - </div> - HTML - - expect(render(input, context)).to include(output.strip) + output = <<~HTML + <div> + <p>Werewolves are allergic to <mark>cassia cinnamon</mark>.</p> + </div> + <div> + <p>Did the werewolves read the <span class="small">small print</span>?</p> + </div> + <div> + <p>Where did all the <span class="underline small">cores</span> run off to?</p> + </div> + <div> + <p>We need <span class="line-through">ten</span> make that twenty VMs.</p> + </div> + <div> + <p><span class="big">O</span>nce upon an infinite loop.</p> + </div> + HTML + + expect(render(input, context)).to include(output.strip) + end end - end - context 'with stem block' do - it 
'does not apply syntax highlighting' do - input = <<~ADOC - [stem] - ++++ - \sqrt{4} = 2 - ++++ - ADOC + context 'with fenced block' do + it 'highlights syntax' do + input = <<~ADOC + ```js + console.log('hello world') + ``` + ADOC - output = "<div>\n<div>\n\\$ qrt{4} = 2\\$\n</div>\n</div>" + output = <<~HTML + <div> + <div> + <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre> + </div> + </div> + HTML - expect(render(input, context)).to include(output) + expect(render(input, context)).to include(output.strip) + end end - end - context 'external links' do - it 'adds the `rel` attribute to the link' do - output = render('link:https://google.com[Google]', context) + context 'with listing block' do + it 'highlights syntax' do + input = <<~ADOC + [source,c++] + .class.cpp + ---- + #include <stdio.h> + + for (int i = 0; i < 5; i++) { + std::cout<<"*"<<std::endl; + } + ---- + ADOC - expect(output).to include('rel="nofollow noreferrer noopener"') + output = <<~HTML + <div> + <div>class.cpp</div> + <div> + <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include <stdio.h></span></span> + <span id="LC2" class="line" lang="cpp"></span> + <span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o"><</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span 
class="p">{</span></span> + <span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o"><<</span><span class="s">"*"</span><span class="o"><<</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span> + <span id="LC5" class="line" lang="cpp"><span class="p">}</span></span></code></pre> + </div> + </div> + HTML + + expect(render(input, context)).to include(output.strip) + end end - end - context 'LaTex code' do - it 'adds class js-render-math to the output' do - input = <<~MD - :stem: latexmath - - [stem] - ++++ - \sqrt{4} = 2 - ++++ - - another part - - [latexmath] - ++++ - \beta_x \gamma - ++++ + context 'with stem block' do + it 'does not apply syntax highlighting' do + input = <<~ADOC + [stem] + ++++ + \sqrt{4} = 2 + ++++ + ADOC - stem:[2+2] is 4 - MD + output = "<div>\n<div>\n\\$ qrt{4} = 2\\$\n</div>\n</div>" - expect(render(input, context)).to include('<pre data-math-style="display" class="code math js-render-math"><code>eta_x gamma</code></pre>') - expect(render(input, context)).to include('<p><code data-math-style="inline" class="code math js-render-math">2+2</code> is 4</p>') + expect(render(input, context)).to include(output) + end end - end - context 'outfilesuffix' do - it 'defaults to adoc' do - output = render("Inter-document reference <<README.adoc#>>", context) + context 'external links' do + it 'adds the `rel` attribute to the link' do + output = render('link:https://google.com[Google]', context) - expect(output).to include("a href=\"README.adoc\"") + expect(output).to include('rel="nofollow noreferrer noopener"') + end end - end - context 'with mermaid diagrams' do - it 'adds class js-render-mermaid to the output' do - input = <<~MD - [mermaid] - .... - graph LR - A[Square Rect] -- Link text --> B((Circle)) - A --> C(Round Rect) - B --> D{Rhombus} - C --> D - .... 
- MD - - output = <<~HTML - <pre data-mermaid-style="display" class="js-render-mermaid">graph LR - A[Square Rect] -- Link text --> B((Circle)) - A --> C(Round Rect) - B --> D{Rhombus} - C --> D</pre> - HTML - - expect(render(input, context)).to include(output.strip) + context 'LaTex code' do + it 'adds class js-render-math to the output' do + input = <<~MD + :stem: latexmath + + [stem] + ++++ + \sqrt{4} = 2 + ++++ + + another part + + [latexmath] + ++++ + \beta_x \gamma + ++++ + + stem:[2+2] is 4 + MD + + expect(render(input, context)).to include('<pre data-math-style="display" class="code math js-render-math"><code>eta_x gamma</code></pre>') + expect(render(input, context)).to include('<p><code data-math-style="inline" class="code math js-render-math">2+2</code> is 4</p>') + end end - it 'applies subs in diagram block' do - input = <<~MD - :class-name: AveryLongClass + context 'outfilesuffix' do + it 'defaults to adoc' do + output = render("Inter-document reference <<README.adoc#>>", context) - [mermaid,subs=+attributes] - .... - classDiagram - Class01 <|-- {class-name} : Cool - .... - MD + expect(output).to include("a href=\"README.adoc\"") + end + end - output = <<~HTML - <pre data-mermaid-style="display" class="js-render-mermaid">classDiagram - Class01 <|-- AveryLongClass : Cool</pre> - HTML + context 'with mermaid diagrams' do + it 'adds class js-render-mermaid to the output' do + input = <<~MD + [mermaid] + .... + graph LR + A[Square Rect] -- Link text --> B((Circle)) + A --> C(Round Rect) + B --> D{Rhombus} + C --> D + .... 
+ MD + + output = <<~HTML + <pre data-mermaid-style="display" class="js-render-mermaid">graph LR + A[Square Rect] -- Link text --> B((Circle)) + A --> C(Round Rect) + B --> D{Rhombus} + C --> D</pre> + HTML + + expect(render(input, context)).to include(output.strip) + end - expect(render(input, context)).to include(output.strip) + it 'applies subs in diagram block' do + input = <<~MD + :class-name: AveryLongClass + + [mermaid,subs=+attributes] + .... + classDiagram + Class01 <|-- {class-name} : Cool + .... + MD + + output = <<~HTML + <pre data-mermaid-style="display" class="js-render-mermaid">classDiagram + Class01 <|-- AveryLongClass : Cool</pre> + HTML + + expect(render(input, context)).to include(output.strip) + end end - end - context 'with Kroki enabled' do - before do - allow_any_instance_of(ApplicationSetting).to receive(:kroki_enabled).and_return(true) - allow_any_instance_of(ApplicationSetting).to receive(:kroki_url).and_return('https://kroki.io') - end + context 'with Kroki enabled' do + before do + allow_any_instance_of(ApplicationSetting).to receive(:kroki_enabled).and_return(true) + allow_any_instance_of(ApplicationSetting).to receive(:kroki_url).and_return('https://kroki.io') + end - it 'converts a graphviz diagram to image' do - input = <<~ADOC - [graphviz] - .... - digraph G { - Hello->World - } - .... - ADOC + it 'converts a graphviz diagram to image' do + input = <<~ADOC + [graphviz] + .... + digraph G { + Hello->World + } + .... 
+ ADOC - output = <<~HTML - <div> - <div> - <a class="no-attachment-icon" href="https://kroki.io/graphviz/svg/eNpLyUwvSizIUHBXqOZSUPBIzcnJ17ULzy_KSeGqBQCEzQka" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Diagram" class="lazy" data-src="https://kroki.io/graphviz/svg/eNpLyUwvSizIUHBXqOZSUPBIzcnJ17ULzy_KSeGqBQCEzQka"></a> - </div> - </div> - HTML + output = <<~HTML + <div> + <div> + <a class="no-attachment-icon" href="https://kroki.io/graphviz/svg/eNpLyUwvSizIUHBXqOZSUPBIzcnJ17ULzy_KSeGqBQCEzQka" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Diagram" class="lazy" data-src="https://kroki.io/graphviz/svg/eNpLyUwvSizIUHBXqOZSUPBIzcnJ17ULzy_KSeGqBQCEzQka"></a> + </div> + </div> + HTML - expect(render(input, context)).to include(output.strip) - end + expect(render(input, context)).to include(output.strip) + end - it 'does not convert a blockdiag diagram to image' do - input = <<~ADOC - [blockdiag] - .... - blockdiag { - Kroki -> generates -> "Block diagrams"; - Kroki -> is -> "very easy!"; - - Kroki [color = "greenyellow"]; - "Block diagrams" [color = "pink"]; - "very easy!" [color = "orange"]; - } - .... - ADOC + it 'does not convert a blockdiag diagram to image' do + input = <<~ADOC + [blockdiag] + .... + blockdiag { + Kroki -> generates -> "Block diagrams"; + Kroki -> is -> "very easy!"; + + Kroki [color = "greenyellow"]; + "Block diagrams" [color = "pink"]; + "very easy!" [color = "orange"]; + } + .... + ADOC - output = <<~HTML - <div> - <div> - <pre>blockdiag { - Kroki -> generates -> "Block diagrams"; - Kroki -> is -> "very easy!"; - - Kroki [color = "greenyellow"]; - "Block diagrams" [color = "pink"]; - "very easy!" 
[color = "orange"]; - }</pre> - </div> - </div> - HTML - - expect(render(input, context)).to include(output.strip) - end + output = <<~HTML + <div> + <div> + <pre>blockdiag { + Kroki -> generates -> "Block diagrams"; + Kroki -> is -> "very easy!"; + + Kroki [color = "greenyellow"]; + "Block diagrams" [color = "pink"]; + "very easy!" [color = "orange"]; + }</pre> + </div> + </div> + HTML + + expect(render(input, context)).to include(output.strip) + end - it 'does not allow kroki-plantuml-include to be overridden' do - input = <<~ADOC - [plantuml, test="{counter:kroki-plantuml-include:/etc/passwd}", format="png"] - .... - class BlockProcessor + it 'does not allow kroki-plantuml-include to be overridden' do + input = <<~ADOC + [plantuml, test="{counter:kroki-plantuml-include:/etc/passwd}", format="png"] + .... + class BlockProcessor + + BlockProcessor <|-- {counter:kroki-plantuml-include} + .... + ADOC - BlockProcessor <|-- {counter:kroki-plantuml-include} - .... - ADOC + output = <<~HTML + <div> + <div> + <a class=\"no-attachment-icon\" href=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"Diagram\" class=\"lazy\" data-src=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==\"></a> + </div> + </div> + HTML + + expect(render(input, {})).to include(output.strip) + end - output = <<~HTML - <div> - <div> - <a class=\"no-attachment-icon\" href=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==\" target=\"_blank\" rel=\"noopener noreferrer\"><img src=\"data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==\" alt=\"Diagram\" class=\"lazy\" 
data-src=\"https://kroki.io/plantuml/png/eNpLzkksLlZwyslPzg4oyk9OLS7OL-LiQuUr2NTo6ipUJ-eX5pWkFlllF-VnZ-oW5CTmlZTm5uhm5iXnlKak1gIABQEb8A==\"></a> - </div> - </div> - HTML + it 'does not allow kroki-server-url to be overridden' do + input = <<~ADOC + [plantuml, test="{counter:kroki-server-url:evilsite}", format="png"] + .... + class BlockProcessor + + BlockProcessor + .... + ADOC - expect(render(input, {})).to include(output.strip) + expect(render(input, {})).not_to include('evilsite') + end end - it 'does not allow kroki-server-url to be overridden' do - input = <<~ADOC - [plantuml, test="{counter:kroki-server-url:evilsite}", format="png"] - .... - class BlockProcessor + context 'with Kroki and BlockDiag (additional format) enabled' do + before do + allow_any_instance_of(ApplicationSetting).to receive(:kroki_enabled).and_return(true) + allow_any_instance_of(ApplicationSetting).to receive(:kroki_url).and_return('https://kroki.io') + allow_any_instance_of(ApplicationSetting).to receive(:kroki_formats_blockdiag).and_return(true) + end - BlockProcessor - .... - ADOC + it 'converts a blockdiag diagram to image' do + input = <<~ADOC + [blockdiag] + .... + blockdiag { + Kroki -> generates -> "Block diagrams"; + Kroki -> is -> "very easy!"; + + Kroki [color = "greenyellow"]; + "Block diagrams" [color = "pink"]; + "very easy!" [color = "orange"]; + } + .... 
+ ADOC - expect(render(input, {})).not_to include('evilsite') + output = <<~HTML + <div> + <div> + <a class="no-attachment-icon" href="https://kroki.io/blockdiag/svg/eNpdzDEKQjEQhOHeU4zpPYFoYesRxGJ9bwghMSsbUYJ4d10UCZbDfPynolOek0Q8FsDeNCestoisNLmy-Qg7R3Blcm5hPcr0ITdaB6X15fv-_YdJixo2CNHI2lmK3sPRA__RwV5SzV80ZAegJjXSyfMFptc71w==" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Diagram" class="lazy" data-src="https://kroki.io/blockdiag/svg/eNpdzDEKQjEQhOHeU4zpPYFoYesRxGJ9bwghMSsbUYJ4d10UCZbDfPynolOek0Q8FsDeNCestoisNLmy-Qg7R3Blcm5hPcr0ITdaB6X15fv-_YdJixo2CNHI2lmK3sPRA__RwV5SzV80ZAegJjXSyfMFptc71w=="></a> + </div> + </div> + HTML + + expect(render(input, context)).to include(output.strip) + end end end - context 'with Kroki and BlockDiag (additional format) enabled' do - before do - allow_any_instance_of(ApplicationSetting).to receive(:kroki_enabled).and_return(true) - allow_any_instance_of(ApplicationSetting).to receive(:kroki_url).and_return('https://kroki.io') - allow_any_instance_of(ApplicationSetting).to receive(:kroki_formats_blockdiag).and_return(true) + context 'with project' do + let(:context) do + { + commit: commit, + project: project, + ref: ref, + requested_path: requested_path + } end - it 'converts a blockdiag diagram to image' do - input = <<~ADOC - [blockdiag] - .... - blockdiag { - Kroki -> generates -> "Block diagrams"; - Kroki -> is -> "very easy!"; - - Kroki [color = "greenyellow"]; - "Block diagrams" [color = "pink"]; - "very easy!" [color = "orange"]; - } - .... 
- ADOC - - output = <<~HTML - <div> - <div> - <a class="no-attachment-icon" href="https://kroki.io/blockdiag/svg/eNpdzDEKQjEQhOHeU4zpPYFoYesRxGJ9bwghMSsbUYJ4d10UCZbDfPynolOek0Q8FsDeNCestoisNLmy-Qg7R3Blcm5hPcr0ITdaB6X15fv-_YdJixo2CNHI2lmK3sPRA__RwV5SzV80ZAegJjXSyfMFptc71w==" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Diagram" class="lazy" data-src="https://kroki.io/blockdiag/svg/eNpdzDEKQjEQhOHeU4zpPYFoYesRxGJ9bwghMSsbUYJ4d10UCZbDfPynolOek0Q8FsDeNCestoisNLmy-Qg7R3Blcm5hPcr0ITdaB6X15fv-_YdJixo2CNHI2lmK3sPRA__RwV5SzV80ZAegJjXSyfMFptc71w=="></a> - </div> - </div> - HTML + let(:commit) { project.commit(ref) } + let(:project) { create(:project, :repository) } + let(:ref) { 'asciidoc' } + let(:requested_path) { '/' } - expect(render(input, context)).to include(output.strip) - end - end - end + context 'include directive' do + subject(:output) { render(input, context) } - context 'with project' do - let(:context) do - { - commit: commit, - project: project, - ref: ref, - requested_path: requested_path - } - end + let(:input) { "Include this:\n\ninclude::#{include_path}[]" } - let(:commit) { project.commit(ref) } - let(:project) { create(:project, :repository) } - let(:ref) { 'asciidoc' } - let(:requested_path) { '/' } - - context 'include directive' do - subject(:output) { render(input, context) } + before do + current_file = requested_path + current_file += 'README.adoc' if requested_path.end_with? '/' - let(:input) { "Include this:\n\ninclude::#{include_path}[]" } + create_file(current_file, "= AsciiDoc\n") + end - before do - current_file = requested_path - current_file += 'README.adoc' if requested_path.end_with? 
'/' + def many_includes(target) + Array.new(10, "include::#{target}[]").join("\n") + end - create_file(current_file, "= AsciiDoc\n") - end + context 'cyclic imports' do + before do + create_file('doc/api/a.adoc', many_includes('b.adoc')) + create_file('doc/api/b.adoc', many_includes('a.adoc')) + end - def many_includes(target) - Array.new(10, "include::#{target}[]").join("\n") - end + let(:include_path) { 'a.adoc' } + let(:requested_path) { 'doc/api/README.md' } - context 'cyclic imports' do - before do - create_file('doc/api/a.adoc', many_includes('b.adoc')) - create_file('doc/api/b.adoc', many_includes('a.adoc')) + it 'completes successfully' do + is_expected.to include('<p>Include this:</p>') + end end - let(:include_path) { 'a.adoc' } - let(:requested_path) { 'doc/api/README.md' } + context 'with path to non-existing file' do + let(:include_path) { 'not-exists.adoc' } - it 'completes successfully' do - is_expected.to include('<p>Include this:</p>') + it 'renders Unresolved directive placeholder' do + is_expected.to include("<strong>[ERROR: include::#{include_path}[] - unresolved directive]</strong>") + end end - end - context 'with path to non-existing file' do - let(:include_path) { 'not-exists.adoc' } + shared_examples :invalid_include do + let(:include_path) { 'dk.png' } - it 'renders Unresolved directive placeholder' do - is_expected.to include("<strong>[ERROR: include::#{include_path}[] - unresolved directive]</strong>") - end - end + before do + allow(project.repository).to receive(:blob_at).and_return(blob) + end - shared_examples :invalid_include do - let(:include_path) { 'dk.png' } + it 'does not read the blob' do + expect(blob).not_to receive(:data) + end - before do - allow(project.repository).to receive(:blob_at).and_return(blob) + it 'renders Unresolved directive placeholder' do + is_expected.to include("<strong>[ERROR: include::#{include_path}[] - unresolved directive]</strong>") + end end - it 'does not read the blob' do - expect(blob).not_to 
receive(:data) - end + context 'with path to a binary file' do + let(:blob) { fake_blob(path: 'dk.png', binary: true) } - it 'renders Unresolved directive placeholder' do - is_expected.to include("<strong>[ERROR: include::#{include_path}[] - unresolved directive]</strong>") + include_examples :invalid_include end - end - - context 'with path to a binary file' do - let(:blob) { fake_blob(path: 'dk.png', binary: true) } - include_examples :invalid_include - end + context 'with path to file in external storage' do + let(:blob) { fake_blob(path: 'dk.png', lfs: true) } - context 'with path to file in external storage' do - let(:blob) { fake_blob(path: 'dk.png', lfs: true) } + before do + allow(Gitlab.config.lfs).to receive(:enabled).and_return(true) + project.update_attribute(:lfs_enabled, true) + end - before do - allow(Gitlab.config.lfs).to receive(:enabled).and_return(true) - project.update_attribute(:lfs_enabled, true) + include_examples :invalid_include end - include_examples :invalid_include - end + context 'with path to a textual file' do + let(:include_path) { 'sample.adoc' } - context 'with path to a textual file' do - let(:include_path) { 'sample.adoc' } + before do + create_file(file_path, "Content from #{include_path}") + end - before do - create_file(file_path, "Content from #{include_path}") - end - - shared_examples :valid_include do - [ - ['/doc/sample.adoc', 'doc/sample.adoc', 'absolute path'], - ['sample.adoc', 'doc/api/sample.adoc', 'relative path'], - ['./sample.adoc', 'doc/api/sample.adoc', 'relative path with leading ./'], - ['../sample.adoc', 'doc/sample.adoc', 'relative path to a file up one directory'], - ['../../sample.adoc', 'sample.adoc', 'relative path for a file up multiple directories'] - ].each do |include_path_, file_path_, desc| - context "the file is specified by #{desc}" do - let(:include_path) { include_path_ } - let(:file_path) { file_path_ } - - it 'includes content of the file' do - is_expected.to include('<p>Include this:</p>') - 
is_expected.to include("<p>Content from #{include_path}</p>") + shared_examples :valid_include do + [ + ['/doc/sample.adoc', 'doc/sample.adoc', 'absolute path'], + ['sample.adoc', 'doc/api/sample.adoc', 'relative path'], + ['./sample.adoc', 'doc/api/sample.adoc', 'relative path with leading ./'], + ['../sample.adoc', 'doc/sample.adoc', 'relative path to a file up one directory'], + ['../../sample.adoc', 'sample.adoc', 'relative path for a file up multiple directories'] + ].each do |include_path_, file_path_, desc| + context "the file is specified by #{desc}" do + let(:include_path) { include_path_ } + let(:file_path) { file_path_ } + + it 'includes content of the file' do + is_expected.to include('<p>Include this:</p>') + is_expected.to include("<p>Content from #{include_path}</p>") + end end end end - end - context 'when requested path is a file in the repo' do - let(:requested_path) { 'doc/api/README.adoc' } + context 'when requested path is a file in the repo' do + let(:requested_path) { 'doc/api/README.adoc' } - include_examples :valid_include + include_examples :valid_include - context 'without a commit (only ref)' do - let(:commit) { nil } + context 'without a commit (only ref)' do + let(:commit) { nil } - include_examples :valid_include + include_examples :valid_include + end end - end - context 'when requested path is a directory in the repo' do - let(:requested_path) { 'doc/api/' } + context 'when requested path is a directory in the repo' do + let(:requested_path) { 'doc/api/' } - include_examples :valid_include + include_examples :valid_include - context 'without a commit (only ref)' do - let(:commit) { nil } + context 'without a commit (only ref)' do + let(:commit) { nil } - include_examples :valid_include + include_examples :valid_include + end end end - end - - context 'when repository is passed into the context' do - let(:wiki_repo) { project.wiki.repository } - let(:include_path) { 'wiki_file.adoc' } - before do - project.create_wiki - 
context.merge!(repository: wiki_repo) - end + context 'when repository is passed into the context' do + let(:wiki_repo) { project.wiki.repository } + let(:include_path) { 'wiki_file.adoc' } - context 'when the file exists' do before do - create_file(include_path, 'Content from wiki', repository: wiki_repo) + project.create_wiki + context.merge!(repository: wiki_repo) end - it { is_expected.to include('<p>Content from wiki</p>') } - end - - context 'when the file does not exist' do - it { is_expected.to include("[ERROR: include::#{include_path}[] - unresolved directive]")} - end - end - - context 'recursive includes with relative paths' do - let(:input) do - <<~ADOC - Source: requested file + context 'when the file exists' do + before do + create_file(include_path, 'Content from wiki', repository: wiki_repo) + end - include::doc/README.adoc[] + it { is_expected.to include('<p>Content from wiki</p>') } + end - include::license.adoc[] - ADOC + context 'when the file does not exist' do + it { is_expected.to include("[ERROR: include::#{include_path}[] - unresolved directive]")} + end end - before do - create_file 'doc/README.adoc', <<~ADOC - Source: doc/README.adoc - - include::../license.adoc[] + context 'recursive includes with relative paths' do + let(:input) do + <<~ADOC + Source: requested file + + include::doc/README.adoc[] + + include::license.adoc[] + ADOC + end - include::api/hello.adoc[] - ADOC - create_file 'license.adoc', <<~ADOC - Source: license.adoc - ADOC - create_file 'doc/api/hello.adoc', <<~ADOC - Source: doc/api/hello.adoc + before do + create_file 'doc/README.adoc', <<~ADOC + Source: doc/README.adoc + + include::../license.adoc[] + + include::api/hello.adoc[] + ADOC + create_file 'license.adoc', <<~ADOC + Source: license.adoc + ADOC + create_file 'doc/api/hello.adoc', <<~ADOC + Source: doc/api/hello.adoc + + include::./common.adoc[] + ADOC + create_file 'doc/api/common.adoc', <<~ADOC + Source: doc/api/common.adoc + ADOC + end - 
include::./common.adoc[] - ADOC - create_file 'doc/api/common.adoc', <<~ADOC - Source: doc/api/common.adoc - ADOC + it 'includes content of the included files recursively' do + expect(output.gsub(/<[^>]+>/, '').gsub(/\n\s*/, "\n").strip).to eq <<~ADOC.strip + Source: requested file + Source: doc/README.adoc + Source: license.adoc + Source: doc/api/hello.adoc + Source: doc/api/common.adoc + Source: license.adoc + ADOC + end end - it 'includes content of the included files recursively' do - expect(output.gsub(/<[^>]+>/, '').gsub(/\n\s*/, "\n").strip).to eq <<~ADOC.strip - Source: requested file - Source: doc/README.adoc - Source: license.adoc - Source: doc/api/hello.adoc - Source: doc/api/common.adoc - Source: license.adoc - ADOC + def create_file(path, content, repository: project.repository) + repository.create_file(project.creator, path, content, + message: "Add #{path}", branch_name: 'asciidoc') end end + end + end - def create_file(path, content, repository: project.repository) - repository.create_file(project.creator, path, content, - message: "Add #{path}", branch_name: 'asciidoc') - end + context 'using ruby-based HTML renderer' do + before do + stub_feature_flags(use_cmark_renderer: false) + end + + it_behaves_like 'renders correct asciidoc' + end + + context 'using c-based HTML renderer' do + before do + stub_feature_flags(use_cmark_renderer: true) end + + it_behaves_like 'renders correct asciidoc' end def render(*args) diff --git a/spec/lib/gitlab/auth/auth_finders_spec.rb b/spec/lib/gitlab/auth/auth_finders_spec.rb index b0522e269e0..f1c891b2adb 100644 --- a/spec/lib/gitlab/auth/auth_finders_spec.rb +++ b/spec/lib/gitlab/auth/auth_finders_spec.rb @@ -873,45 +873,65 @@ RSpec.describe Gitlab::Auth::AuthFinders do end describe '#find_user_from_job_token' do + let(:token) { job.token } + subject { find_user_from_job_token } - context 'when the token is in the headers' do - before do - set_header(described_class::JOB_TOKEN_HEADER, token) + shared_examples 
'finds user when job token allowed' do + context 'when the token is in the headers' do + before do + set_header(described_class::JOB_TOKEN_HEADER, token) + end + + it_behaves_like 'find user from job token' end - it_behaves_like 'find user from job token' - end + context 'when the token is in the job_token param' do + before do + set_param(described_class::JOB_TOKEN_PARAM, token) + end - context 'when the token is in the job_token param' do - before do - set_param(described_class::JOB_TOKEN_PARAM, token) + it_behaves_like 'find user from job token' end - it_behaves_like 'find user from job token' - end + context 'when the token is in the token param' do + before do + set_param(described_class::RUNNER_JOB_TOKEN_PARAM, token) + end - context 'when the token is in the token param' do - before do - set_param(described_class::RUNNER_JOB_TOKEN_PARAM, token) + it_behaves_like 'find user from job token' end + end - it_behaves_like 'find user from job token' + context 'when route setting allows job_token' do + let(:route_authentication_setting) { { job_token_allowed: true } } + + include_examples 'finds user when job token allowed' end - context 'when the job token is provided via basic auth' do + context 'when route setting is basic auth' do let(:route_authentication_setting) { { job_token_allowed: :basic_auth } } - let(:username) { ::Gitlab::Auth::CI_JOB_USER } - let(:token) { job.token } - before do - set_basic_auth_header(username, token) + context 'when the token is provided via basic auth' do + let(:username) { ::Gitlab::Auth::CI_JOB_USER } + + before do + set_basic_auth_header(username, token) + end + + it { is_expected.to eq(user) } end - it { is_expected.to eq(user) } + include_examples 'finds user when job token allowed' + end - context 'credentials are provided but route setting is incorrect' do - let(:route_authentication_setting) { { job_token_allowed: :unknown } } + context 'when route setting job_token_allowed is invalid' do + 
let(:route_authentication_setting) { { job_token_allowed: false } } + + context 'when the token is provided' do + before do + set_header(described_class::JOB_TOKEN_HEADER, token) + end it { is_expected.to be_nil } end diff --git a/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb b/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb index 81b8b5dde08..0b29163671c 100644 --- a/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb +++ b/spec/lib/gitlab/background_migration/add_modified_to_approval_merge_request_rule_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::AddModifiedToApprovalMergeRequestRule, schema: 20200817195628 do +RSpec.describe Gitlab::BackgroundMigration::AddModifiedToApprovalMergeRequestRule, schema: 20181228175414 do let(:determine_if_rules_are_modified) { described_class.new } let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab') } diff --git a/spec/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed_spec.rb b/spec/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed_spec.rb new file mode 100644 index 00000000000..b50a55a9e41 --- /dev/null +++ b/spec/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed_spec.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::AddPrimaryEmailToEmailsIfUserConfirmed do + let(:users) { table(:users) } + let(:emails) { table(:emails) } + + let!(:unconfirmed_user) { users.create!(name: 'unconfirmed', email: 'unconfirmed@example.com', confirmed_at: nil, projects_limit: 100) } + let!(:confirmed_user_1) { users.create!(name: 'confirmed-1', email: 'confirmed-1@example.com', confirmed_at: 1.day.ago, projects_limit: 100) } + let!(:confirmed_user_2) { users.create!(name: 'confirmed-2', email: 
'confirmed-2@example.com', confirmed_at: 1.day.ago, projects_limit: 100) } + let!(:email) { emails.create!(user_id: confirmed_user_1.id, email: 'confirmed-1@example.com', confirmed_at: 1.day.ago) } + + let(:perform) { described_class.new.perform(users.first.id, users.last.id) } + + it 'adds the primary email of confirmed users to Emails, unless already added', :aggregate_failures do + expect(emails.where(email: [unconfirmed_user.email, confirmed_user_2.email])).to be_empty + + expect { perform }.not_to raise_error + + expect(emails.where(email: unconfirmed_user.email).count).to eq(0) + expect(emails.where(email: confirmed_user_1.email, user_id: confirmed_user_1.id).count).to eq(1) + expect(emails.where(email: confirmed_user_2.email, user_id: confirmed_user_2.id).count).to eq(1) + + email_2 = emails.find_by(email: confirmed_user_2.email, user_id: confirmed_user_2.id) + expect(email_2.confirmed_at).to eq(confirmed_user_2.reload.confirmed_at) + end + + it 'sets timestamps on the created Emails' do + perform + + email_2 = emails.find_by(email: confirmed_user_2.email, user_id: confirmed_user_2.id) + + expect(email_2.created_at).not_to be_nil + expect(email_2.updated_at).not_to be_nil + end + + context 'when a range of IDs is specified' do + let!(:confirmed_user_3) { users.create!(name: 'confirmed-3', email: 'confirmed-3@example.com', confirmed_at: 1.hour.ago, projects_limit: 100) } + let!(:confirmed_user_4) { users.create!(name: 'confirmed-4', email: 'confirmed-4@example.com', confirmed_at: 1.hour.ago, projects_limit: 100) } + + it 'only acts on the specified range of IDs', :aggregate_failures do + expect do + described_class.new.perform(confirmed_user_2.id, confirmed_user_3.id) + end.to change { Email.count }.by(2) + expect(emails.where(email: confirmed_user_4.email).count).to eq(0) + end + end +end diff --git a/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb b/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb index 
49fa7b41916..6ab1e3ecd70 100644 --- a/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_artifact_expiry_date_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillArtifactExpiryDate, :migration, schema: 20201111152859 do +RSpec.describe Gitlab::BackgroundMigration::BackfillArtifactExpiryDate, :migration, schema: 20181228175414 do subject(:perform) { migration.perform(1, 99) } let(:migration) { described_class.new } diff --git a/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb b/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb index 54c14e7a4b8..1404ada3647 100644 --- a/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillDeploymentClustersFromDeployments, :migration, schema: 20200227140242 do +RSpec.describe Gitlab::BackgroundMigration::BackfillDeploymentClustersFromDeployments, :migration, schema: 20181228175414 do subject { described_class.new } describe '#perform' do diff --git a/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb b/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb deleted file mode 100644 index 4bf59a02a31..00000000000 --- a/spec/lib/gitlab/background_migration/backfill_design_internal_ids_spec.rb +++ /dev/null @@ -1,69 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::BackfillDesignInternalIds, :migration, schema: 20201030203854 do - subject { described_class.new(designs) } - - let_it_be(:namespaces) { table(:namespaces) } - let_it_be(:projects) { table(:projects) } - let_it_be(:designs) { 
table(:design_management_designs) } - - let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } - let(:project) { projects.create!(namespace_id: namespace.id) } - let(:project_2) { projects.create!(namespace_id: namespace.id) } - - def create_design!(proj = project) - designs.create!(project_id: proj.id, filename: generate(:filename)) - end - - def migrate! - relation = designs.where(project_id: [project.id, project_2.id]).select(:project_id).distinct - - subject.perform(relation) - end - - it 'backfills the iid for designs' do - 3.times { create_design! } - - expect do - migrate! - end.to change { designs.pluck(:iid) }.from(contain_exactly(nil, nil, nil)).to(contain_exactly(1, 2, 3)) - end - - it 'scopes IIDs and handles range and starting-point correctly' do - create_design!.update!(iid: 10) - create_design!.update!(iid: 12) - create_design!(project_2).update!(iid: 7) - project_3 = projects.create!(namespace_id: namespace.id) - - 2.times { create_design! } - 2.times { create_design!(project_2) } - 2.times { create_design!(project_3) } - - migrate! - - expect(designs.where(project_id: project.id).pluck(:iid)).to contain_exactly(10, 12, 13, 14) - expect(designs.where(project_id: project_2.id).pluck(:iid)).to contain_exactly(7, 8, 9) - expect(designs.where(project_id: project_3.id).pluck(:iid)).to contain_exactly(nil, nil) - end - - it 'updates the internal ID records' do - design = create_design! - 2.times { create_design! } - design.update!(iid: 10) - scope = { project_id: project.id } - usage = :design_management_designs - init = ->(_d, _s) { 0 } - - ::InternalId.track_greatest(design, scope, usage, 10, init) - - migrate! 
- - next_iid = ::InternalId.generate_next(design, scope, usage, init) - - expect(designs.pluck(:iid)).to contain_exactly(10, 11, 12) - expect(design.reload.iid).to eq(10) - expect(next_iid).to eq(13) - end -end diff --git a/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb b/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb index 550bdc484c9..9194525e713 100644 --- a/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_environment_id_deployment_merge_requests_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillEnvironmentIdDeploymentMergeRequests, schema: 20200312134637 do +RSpec.describe Gitlab::BackgroundMigration::BackfillEnvironmentIdDeploymentMergeRequests, schema: 20181228175414 do let(:environments) { table(:environments) } let(:merge_requests) { table(:merge_requests) } let(:deployments) { table(:deployments) } diff --git a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb index 58864aac084..446d62bbd2a 100644 --- a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20201028182809 do +RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20181228175414 do let_it_be(:jira_integration_temp) { described_class::JiraServiceTemp } let_it_be(:jira_tracker_data_temp) { described_class::JiraTrackerDataTemp } let_it_be(:atlassian_host) { 'https://api.atlassian.net' } diff --git 
a/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb b/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb index c2daa35703d..d33f52514da 100644 --- a/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_merge_request_cleanup_schedules_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillMergeRequestCleanupSchedules, schema: 20201103110018 do +RSpec.describe Gitlab::BackgroundMigration::BackfillMergeRequestCleanupSchedules, schema: 20181228175414 do let(:merge_requests) { table(:merge_requests) } let(:cleanup_schedules) { table(:merge_request_cleanup_schedules) } let(:metrics) { table(:merge_request_metrics) } diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb index 43e76a2952e..0f8adca2ca4 100644 --- a/spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_namespace_settings_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceSettings, schema: 20200703125016 do +RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceSettings, schema: 20181228175414 do let(:namespaces) { table(:namespaces) } let(:namespace_settings) { table(:namespace_settings) } let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } diff --git a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb b/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb index 48c5674822a..e6b0db2ab73 100644 --- a/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 
Gitlab::BackgroundMigration::BackfillProjectSettings, schema: 20200114113341 do +RSpec.describe Gitlab::BackgroundMigration::BackfillProjectSettings, schema: 20181228175414 do let(:projects) { table(:projects) } let(:project_settings) { table(:project_settings) } let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } diff --git a/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb b/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb index 9ce6a3227b5..3468df3dccd 100644 --- a/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb +++ b/spec/lib/gitlab/background_migration/backfill_push_rules_id_in_projects_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::BackfillPushRulesIdInProjects, :migration, schema: 2020_03_25_162730 do +RSpec.describe Gitlab::BackgroundMigration::BackfillPushRulesIdInProjects, :migration, schema: 20181228175414 do let(:push_rules) { table(:push_rules) } let(:projects) { table(:projects) } let(:project_settings) { table(:project_settings) } diff --git a/spec/lib/gitlab/background_migration/backfill_user_namespace_spec.rb b/spec/lib/gitlab/background_migration/backfill_user_namespace_spec.rb new file mode 100644 index 00000000000..395248b786d --- /dev/null +++ b/spec/lib/gitlab/background_migration/backfill_user_namespace_spec.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::BackfillUserNamespace, :migration, schema: 20210930211936 do + let(:migration) { described_class.new } + let(:namespaces_table) { table(:namespaces) } + + let(:table_name) { 'namespaces' } + let(:batch_column) { :id } + let(:sub_batch_size) { 100 } + let(:pause_ms) { 0 } + + subject(:perform_migration) { migration.perform(1, 10, table_name, batch_column, sub_batch_size, pause_ms) } + + before do + namespaces_table.create!(id: 1, name: 'test1', path: 'test1', 
type: nil) + namespaces_table.create!(id: 2, name: 'test2', path: 'test2', type: 'User') + namespaces_table.create!(id: 3, name: 'test3', path: 'test3', type: 'Group') + namespaces_table.create!(id: 4, name: 'test4', path: 'test4', type: nil) + namespaces_table.create!(id: 11, name: 'test11', path: 'test11', type: nil) + end + + it 'backfills `type` for the selected records', :aggregate_failures do + queries = ActiveRecord::QueryRecorder.new do + perform_migration + end + + expect(queries.count).to eq(3) + expect(namespaces_table.where(type: 'User').count).to eq 3 + expect(namespaces_table.where(type: 'User').pluck(:id)).to match_array([1, 2, 4]) + end + + it 'tracks timings of queries' do + expect(migration.batch_metrics.timings).to be_empty + + expect { perform_migration }.to change { migration.batch_metrics.timings } + end +end diff --git a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb index 3e378db04d4..d4fc24d0559 100644 --- a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb +++ b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob do - let(:table_name) { :copy_primary_key_test } + let(:table_name) { :_test_copy_primary_key_test } let(:test_table) { table(table_name) } let(:sub_batch_size) { 1000 } let(:pause_ms) { 0 } diff --git a/spec/lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics_spec.rb b/spec/lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics_spec.rb deleted file mode 100644 index 71bb794d539..00000000000 --- a/spec/lib/gitlab/background_migration/copy_merge_request_target_project_to_merge_request_metrics_spec.rb +++ /dev/null @@ -1,39 +0,0 @@ -# 
frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::CopyMergeRequestTargetProjectToMergeRequestMetrics, :migration, schema: 20200723125205 do - let(:migration) { described_class.new } - - let_it_be(:namespaces) { table(:namespaces) } - let_it_be(:projects) { table(:projects) } - let_it_be(:merge_requests) { table(:merge_requests) } - let_it_be(:metrics) { table(:merge_request_metrics) } - - let!(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') } - let!(:project_1) { projects.create!(namespace_id: namespace.id) } - let!(:project_2) { projects.create!(namespace_id: namespace.id) } - let!(:merge_request_to_migrate_1) { merge_requests.create!(source_branch: 'a', target_branch: 'b', target_project_id: project_1.id) } - let!(:merge_request_to_migrate_2) { merge_requests.create!(source_branch: 'c', target_branch: 'd', target_project_id: project_2.id) } - let!(:merge_request_without_metrics) { merge_requests.create!(source_branch: 'e', target_branch: 'f', target_project_id: project_2.id) } - - let!(:metrics_1) { metrics.create!(merge_request_id: merge_request_to_migrate_1.id) } - let!(:metrics_2) { metrics.create!(merge_request_id: merge_request_to_migrate_2.id) } - - let(:merge_request_ids) { [merge_request_to_migrate_1.id, merge_request_to_migrate_2.id, merge_request_without_metrics.id] } - - subject { migration.perform(merge_request_ids.min, merge_request_ids.max) } - - it 'copies `target_project_id` to the associated `merge_request_metrics` record' do - subject - - expect(metrics_1.reload.target_project_id).to eq(project_1.id) - expect(metrics_2.reload.target_project_id).to eq(project_2.id) - end - - it 'does not create metrics record when it is missing' do - subject - - expect(metrics.find_by_merge_request_id(merge_request_without_metrics.id)).to be_nil - end -end diff --git a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb 
b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb index c4beb719e1e..b83dc6fff7a 100644 --- a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb +++ b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20201110110454 do +RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20181228175414 do let_it_be(:background_migration_jobs) { table(:background_migration_jobs) } let_it_be(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } let_it_be(:users) { table(:users) } diff --git a/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb new file mode 100644 index 00000000000..c343ee438b8 --- /dev/null +++ b/spec/lib/gitlab/background_migration/fix_merge_request_diff_commit_users_spec.rb @@ -0,0 +1,316 @@ +# frozen_string_literal: true + +require 'spec_helper' + +# The underlying migration relies on the global models (e.g. Project). This +# means we also need to use FactoryBot factories to ensure everything is +# operating using the same types. If we use `table()` and similar methods we +# would have to duplicate a lot of logic just for these tests. 
+# +# rubocop: disable RSpec/FactoriesInMigrationSpecs +RSpec.describe Gitlab::BackgroundMigration::FixMergeRequestDiffCommitUsers do + let(:migration) { described_class.new } + + describe '#perform' do + context 'when the project exists' do + it 'processes the project' do + project = create(:project) + + expect(migration).to receive(:process).with(project) + expect(migration).to receive(:schedule_next_job) + + migration.perform(project.id) + end + + it 'marks the background job as finished' do + project = create(:project) + + Gitlab::Database::BackgroundMigrationJob.create!( + class_name: 'FixMergeRequestDiffCommitUsers', + arguments: [project.id] + ) + + migration.perform(project.id) + + job = Gitlab::Database::BackgroundMigrationJob + .find_by(class_name: 'FixMergeRequestDiffCommitUsers') + + expect(job.status).to eq('succeeded') + end + end + + context 'when the project does not exist' do + it 'does nothing' do + expect(migration).not_to receive(:process) + expect(migration).to receive(:schedule_next_job) + + migration.perform(-1) + end + end + end + + describe '#process' do + it 'processes the merge requests of the project' do + project = create(:project, :repository) + commit = project.commit + mr = create( + :merge_request_with_diffs, + source_project: project, + target_project: project + ) + + diff = mr.merge_request_diffs.first + + create( + :merge_request_diff_commit, + merge_request_diff: diff, + sha: commit.sha, + relative_order: 9000 + ) + + migration.process(project) + + updated = diff + .merge_request_diff_commits + .find_by(sha: commit.sha, relative_order: 9000) + + expect(updated.commit_author_id).not_to be_nil + expect(updated.committer_id).not_to be_nil + end + end + + describe '#update_commit' do + let(:project) { create(:project, :repository) } + let(:mr) do + create( + :merge_request_with_diffs, + source_project: project, + target_project: project + ) + end + + let(:diff) { mr.merge_request_diffs.first } + let(:commit) { project.commit } + + 
def update_row(migration, project, diff, row) + migration.update_commit(project, row) + + diff + .merge_request_diff_commits + .find_by(sha: row.sha, relative_order: row.relative_order) + end + + it 'populates missing commit authors' do + commit_row = create( + :merge_request_diff_commit, + merge_request_diff: diff, + sha: commit.sha, + relative_order: 9000 + ) + + updated = update_row(migration, project, diff, commit_row) + + expect(updated.commit_author.name).to eq(commit.to_hash[:author_name]) + expect(updated.commit_author.email).to eq(commit.to_hash[:author_email]) + end + + it 'populates missing committers' do + commit_row = create( + :merge_request_diff_commit, + merge_request_diff: diff, + sha: commit.sha, + relative_order: 9000 + ) + + updated = update_row(migration, project, diff, commit_row) + + expect(updated.committer.name).to eq(commit.to_hash[:committer_name]) + expect(updated.committer.email).to eq(commit.to_hash[:committer_email]) + end + + it 'leaves existing commit authors as-is' do + user = create(:merge_request_diff_commit_user) + commit_row = create( + :merge_request_diff_commit, + merge_request_diff: diff, + sha: commit.sha, + relative_order: 9000, + commit_author: user + ) + + updated = update_row(migration, project, diff, commit_row) + + expect(updated.commit_author).to eq(user) + end + + it 'leaves existing committers as-is' do + user = create(:merge_request_diff_commit_user) + commit_row = create( + :merge_request_diff_commit, + merge_request_diff: diff, + sha: commit.sha, + relative_order: 9000, + committer: user + ) + + updated = update_row(migration, project, diff, commit_row) + + expect(updated.committer).to eq(user) + end + + it 'does nothing when both the author and committer are present' do + user = create(:merge_request_diff_commit_user) + commit_row = create( + :merge_request_diff_commit, + merge_request_diff: diff, + sha: commit.sha, + relative_order: 9000, + committer: user, + commit_author: user + ) + + recorder = 
ActiveRecord::QueryRecorder.new do + migration.update_commit(project, commit_row) + end + + expect(recorder.count).to be_zero + end + + it 'does nothing if the commit does not exist in Git' do + user = create(:merge_request_diff_commit_user) + commit_row = create( + :merge_request_diff_commit, + merge_request_diff: diff, + sha: 'kittens', + relative_order: 9000, + committer: user, + commit_author: user + ) + + recorder = ActiveRecord::QueryRecorder.new do + migration.update_commit(project, commit_row) + end + + expect(recorder.count).to be_zero + end + + it 'does nothing when the committer/author are missing in the Git commit' do + user = create(:merge_request_diff_commit_user) + commit_row = create( + :merge_request_diff_commit, + merge_request_diff: diff, + sha: commit.sha, + relative_order: 9000, + committer: user, + commit_author: user + ) + + allow(migration).to receive(:find_or_create_user).and_return(nil) + + recorder = ActiveRecord::QueryRecorder.new do + migration.update_commit(project, commit_row) + end + + expect(recorder.count).to be_zero + end + end + + describe '#schedule_next_job' do + it 'schedules the next background migration' do + Gitlab::Database::BackgroundMigrationJob + .create!(class_name: 'FixMergeRequestDiffCommitUsers', arguments: [42]) + + expect(BackgroundMigrationWorker) + .to receive(:perform_in) + .with(2.minutes, 'FixMergeRequestDiffCommitUsers', [42]) + + migration.schedule_next_job + end + + it 'does nothing when there are no jobs' do + expect(BackgroundMigrationWorker) + .not_to receive(:perform_in) + + migration.schedule_next_job + end + end + + describe '#find_commit' do + let(:project) { create(:project, :repository) } + + it 'finds a commit using Git' do + commit = project.commit + found = migration.find_commit(project, commit.sha) + + expect(found).to eq(commit.to_hash) + end + + it 'caches the results' do + commit = project.commit + + migration.find_commit(project, commit.sha) + + expect { migration.find_commit(project, 
commit.sha) } + .not_to change { Gitlab::GitalyClient.get_request_count } + end + + it 'returns an empty hash if the commit does not exist' do + expect(migration.find_commit(project, 'kittens')).to eq({}) + end + end + + describe '#find_or_create_user' do + let(:project) { create(:project, :repository) } + + it 'creates missing users' do + commit = project.commit.to_hash + id = migration.find_or_create_user(commit, :author_name, :author_email) + + expect(MergeRequest::DiffCommitUser.count).to eq(1) + + created = MergeRequest::DiffCommitUser.first + + expect(created.name).to eq(commit[:author_name]) + expect(created.email).to eq(commit[:author_email]) + expect(created.id).to eq(id) + end + + it 'returns users that already exist' do + commit = project.commit.to_hash + user1 = migration.find_or_create_user(commit, :author_name, :author_email) + user2 = migration.find_or_create_user(commit, :author_name, :author_email) + + expect(user1).to eq(user2) + end + + it 'caches the results' do + commit = project.commit.to_hash + + migration.find_or_create_user(commit, :author_name, :author_email) + + recorder = ActiveRecord::QueryRecorder.new do + migration.find_or_create_user(commit, :author_name, :author_email) + end + + expect(recorder.count).to be_zero + end + + it 'returns nil if the commit details are missing' do + id = migration.find_or_create_user({}, :author_name, :author_email) + + expect(id).to be_nil + end + end + + describe '#matches_row' do + it 'returns the query matches for the composite primary key' do + row = double(:commit, merge_request_diff_id: 4, relative_order: 5) + arel = migration.matches_row(row) + + expect(arel.to_sql).to eq( + '("merge_request_diff_commits"."merge_request_diff_id", "merge_request_diff_commits"."relative_order") = (4, 5)' + ) + end + end +end +# rubocop: enable RSpec/FactoriesInMigrationSpecs diff --git a/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb 
b/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb deleted file mode 100644 index d503824041b..00000000000 --- a/spec/lib/gitlab/background_migration/fix_projects_without_project_feature_spec.rb +++ /dev/null @@ -1,75 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::FixProjectsWithoutProjectFeature, schema: 2020_01_27_111840 do - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:project_features) { table(:project_features) } - - let(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } - - let!(:project) { projects.create!(namespace_id: namespace.id) } - let(:private_project_without_feature) { projects.create!(namespace_id: namespace.id, visibility_level: 0) } - let(:public_project_without_feature) { projects.create!(namespace_id: namespace.id, visibility_level: 20) } - let!(:projects_without_feature) { [private_project_without_feature, public_project_without_feature] } - - before do - project_features.create!({ project_id: project.id, pages_access_level: 20 }) - end - - subject { described_class.new.perform(Project.minimum(:id), Project.maximum(:id)) } - - def project_feature_records - project_features.order(:project_id).pluck(:project_id) - end - - def features(project) - project_features.find_by(project_id: project.id)&.attributes - end - - it 'creates a ProjectFeature for projects without it' do - expect { subject }.to change { project_feature_records }.from([project.id]).to([project.id, *projects_without_feature.map(&:id)]) - end - - it 'creates ProjectFeature records with default values for a public project' do - subject - - expect(features(public_project_without_feature)).to include( - { - "merge_requests_access_level" => 20, - "issues_access_level" => 20, - "wiki_access_level" => 20, - "snippets_access_level" => 20, - "builds_access_level" => 20, - "repository_access_level" => 20, - "pages_access_level" => 20, - 
"forking_access_level" => 20 - } - ) - end - - it 'creates ProjectFeature records with default values for a private project' do - subject - - expect(features(private_project_without_feature)).to include("pages_access_level" => 10) - end - - context 'when access control to pages is forced' do - before do - allow(::Gitlab::Pages).to receive(:access_control_is_forced?).and_return(true) - end - - it 'creates ProjectFeature records with default values for a public project' do - subject - - expect(features(public_project_without_feature)).to include("pages_access_level" => 10) - end - end - - it 'sets created_at/updated_at timestamps' do - subject - - expect(project_features.where('created_at IS NULL OR updated_at IS NULL')).to be_empty - end -end diff --git a/spec/lib/gitlab/background_migration/fix_projects_without_prometheus_service_spec.rb b/spec/lib/gitlab/background_migration/fix_projects_without_prometheus_service_spec.rb deleted file mode 100644 index 9a497a9e01a..00000000000 --- a/spec/lib/gitlab/background_migration/fix_projects_without_prometheus_service_spec.rb +++ /dev/null @@ -1,234 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::FixProjectsWithoutPrometheusService, :migration, schema: 2020_02_20_115023 do - def service_params_for(project_id, params = {}) - { - project_id: project_id, - active: false, - properties: '{}', - type: 'PrometheusService', - template: false, - push_events: true, - issues_events: true, - merge_requests_events: true, - tag_push_events: true, - note_events: true, - category: 'monitoring', - default: false, - wiki_page_events: true, - pipeline_events: true, - confidential_issues_events: true, - commit_events: true, - job_events: true, - confidential_note_events: true, - deployment_events: false - }.merge(params) - end - - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:services) { table(:services) } - let(:clusters) { table(:clusters) } 
- let(:cluster_groups) { table(:cluster_groups) } - let(:clusters_applications_prometheus) { table(:clusters_applications_prometheus) } - let(:namespace) { namespaces.create!(name: 'user', path: 'user') } - let(:project) { projects.create!(namespace_id: namespace.id) } - - let(:application_statuses) do - { - errored: -1, - installed: 3, - updated: 5 - } - end - - let(:cluster_types) do - { - instance_type: 1, - group_type: 2, - project_type: 3 - } - end - - let(:columns) do - %w(project_id active properties type template push_events - issues_events merge_requests_events tag_push_events - note_events category default wiki_page_events pipeline_events - confidential_issues_events commit_events job_events - confidential_note_events deployment_events) - end - - describe '#perform' do - shared_examples 'fix services entries state' do - it 'is idempotent' do - expect { subject.perform(project.id, project.id + 1) }.to change { services.order(:id).map { |row| row.attributes } } - - expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } } - end - - context 'non prometheus services' do - it 'does not change them' do - other_type = 'SomeOtherService' - services.create!(service_params_for(project.id, active: true, type: other_type)) - - expect { subject.perform(project.id, project.id + 1) }.not_to change { services.where(type: other_type).order(:id).map { |row| row.attributes } } - end - end - - context 'prometheus integration services do not exist' do - it 'creates missing services entries', :aggregate_failures do - expect { subject.perform(project.id, project.id + 1) }.to change { services.count }.by(1) - expect([service_params_for(project.id, active: true)]).to eq services.order(:id).map { |row| row.attributes.slice(*columns).symbolize_keys } - end - - context 'template is present for prometheus services' do - it 'creates missing services entries', :aggregate_failures do - 
services.create!(service_params_for(nil, template: true, properties: { 'from_template' => true }.to_json)) - - expect { subject.perform(project.id, project.id + 1) }.to change { services.count }.by(1) - updated_rows = services.where(template: false).order(:id).map { |row| row.attributes.slice(*columns).symbolize_keys } - expect([service_params_for(project.id, active: true, properties: { 'from_template' => true }.to_json)]).to eq updated_rows - end - end - end - - context 'prometheus integration services exist' do - context 'in active state' do - it 'does not change them' do - services.create!(service_params_for(project.id, active: true)) - - expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } } - end - end - - context 'not in active state' do - it 'sets active attribute to true' do - service = services.create!(service_params_for(project.id, active: false)) - - expect { subject.perform(project.id, project.id + 1) }.to change { service.reload.active? 
}.from(false).to(true) - end - - context 'prometheus services are configured manually ' do - it 'does not change them' do - properties = '{"api_url":"http://test.dev","manual_configuration":"1"}' - services.create!(service_params_for(project.id, properties: properties, active: false)) - - expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } } - end - end - end - end - end - - context 'k8s cluster shared on instance level' do - let(:cluster) { clusters.create!(name: 'cluster', cluster_type: cluster_types[:instance_type]) } - - context 'with installed prometheus application' do - before do - clusters_applications_prometheus.create!(cluster_id: cluster.id, status: application_statuses[:installed], version: '123') - end - - it_behaves_like 'fix services entries state' - end - - context 'with updated prometheus application' do - before do - clusters_applications_prometheus.create!(cluster_id: cluster.id, status: application_statuses[:updated], version: '123') - end - - it_behaves_like 'fix services entries state' - end - - context 'with errored prometheus application' do - before do - clusters_applications_prometheus.create!(cluster_id: cluster.id, status: application_statuses[:errored], version: '123') - end - - it 'does not change services entries' do - expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } } - end - end - end - - context 'k8s cluster shared on group level' do - let(:cluster) { clusters.create!(name: 'cluster', cluster_type: cluster_types[:group_type]) } - - before do - cluster_groups.create!(cluster_id: cluster.id, group_id: project.namespace_id) - end - - context 'with installed prometheus application' do - before do - clusters_applications_prometheus.create!(cluster_id: cluster.id, status: application_statuses[:installed], version: '123') - end - - it_behaves_like 'fix services entries state' - - context 'second k8s 
cluster without application available' do - let(:namespace_2) { namespaces.create!(name: 'namespace2', path: 'namespace2') } - let(:project_2) { projects.create!(namespace_id: namespace_2.id) } - - before do - cluster_2 = clusters.create!(name: 'cluster2', cluster_type: cluster_types[:group_type]) - cluster_groups.create!(cluster_id: cluster_2.id, group_id: project_2.namespace_id) - end - - it 'changed only affected services entries' do - expect { subject.perform(project.id, project_2.id + 1) }.to change { services.count }.by(1) - expect([service_params_for(project.id, active: true)]).to eq services.order(:id).map { |row| row.attributes.slice(*columns).symbolize_keys } - end - end - end - - context 'with updated prometheus application' do - before do - clusters_applications_prometheus.create!(cluster_id: cluster.id, status: application_statuses[:updated], version: '123') - end - - it_behaves_like 'fix services entries state' - end - - context 'with errored prometheus application' do - before do - clusters_applications_prometheus.create!(cluster_id: cluster.id, status: application_statuses[:errored], version: '123') - end - - it 'does not change services entries' do - expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } } - end - end - - context 'with missing prometheus application' do - it 'does not change services entries' do - expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } } - end - - context 'with inactive service' do - it 'does not change services entries' do - services.create!(service_params_for(project.id)) - - expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } } - end - end - end - end - - context 'k8s cluster for single project' do - let(:cluster) { clusters.create!(name: 'cluster', cluster_type: cluster_types[:project_type]) } - let(:cluster_projects) 
{ table(:cluster_projects) } - - context 'with installed prometheus application' do - before do - cluster_projects.create!(cluster_id: cluster.id, project_id: project.id) - clusters_applications_prometheus.create!(cluster_id: cluster.id, status: application_statuses[:installed], version: '123') - end - - it 'does not change services entries' do - expect { subject.perform(project.id, project.id + 1) }.not_to change { services.order(:id).map { |row| row.attributes } } - end - end - end - end -end diff --git a/spec/lib/gitlab/background_migration/job_coordinator_spec.rb b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb new file mode 100644 index 00000000000..a0543ca9958 --- /dev/null +++ b/spec/lib/gitlab/background_migration/job_coordinator_spec.rb @@ -0,0 +1,344 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::JobCoordinator do + let(:database) { :main } + let(:worker_class) { BackgroundMigrationWorker } + let(:coordinator) { described_class.new(database, worker_class) } + + describe '.for_database' do + it 'returns an executor with the correct worker class and database' do + coordinator = described_class.for_database(database) + + expect(coordinator.database).to eq(database) + expect(coordinator.worker_class).to eq(worker_class) + end + + context 'when passed in as a string' do + it 'retruns an executor with the correct worker class and database' do + coordinator = described_class.for_database(database.to_s) + + expect(coordinator.database).to eq(database) + expect(coordinator.worker_class).to eq(worker_class) + end + end + + context 'when an invalid value is given' do + it 'raises an error' do + expect do + described_class.for_database('notvalid') + end.to raise_error(ArgumentError, "database must be one of [main], got 'notvalid'") + end + end + end + + describe '#queue' do + it 'returns background migration worker queue' do + expect(coordinator.queue).to 
eq(worker_class.sidekiq_options['queue']) + end + end + + describe '#with_shared_connection' do + it 'yields to the block after properly configuring SharedModel' do + expect(Gitlab::Database::SharedModel).to receive(:using_connection) + .with(ActiveRecord::Base.connection).and_yield + + expect { |b| coordinator.with_shared_connection(&b) }.to yield_with_no_args + end + end + + describe '#steal' do + context 'when there are enqueued jobs present' do + let(:queue) do + [ + double(args: ['Foo', [10, 20]], klass: worker_class.name), + double(args: ['Bar', [20, 30]], klass: worker_class.name), + double(args: ['Foo', [20, 30]], klass: 'MergeWorker') + ] + end + + before do + allow(Sidekiq::Queue).to receive(:new) + .with(coordinator.queue) + .and_return(queue) + end + + context 'when queue contains unprocessed jobs' do + it 'steals jobs from a queue' do + expect(queue[0]).to receive(:delete).and_return(true) + + expect(coordinator).to receive(:perform).with('Foo', [10, 20]) + + coordinator.steal('Foo') + end + + it 'sets up the shared connection while stealing jobs' do + connection = double('connection') + allow(coordinator).to receive(:connection).and_return(connection) + + expect(coordinator).to receive(:with_shared_connection).and_call_original + + expect(queue[0]).to receive(:delete).and_return(true) + + expect(coordinator).to receive(:perform).with('Foo', [10, 20]) do + expect(Gitlab::Database::SharedModel.connection).to be(connection) + end + + coordinator.steal('Foo') do + expect(Gitlab::Database::SharedModel.connection).to be(connection) + + true # the job is only performed if the block returns true + end + end + + it 'does not steal job that has already been taken' do + expect(queue[0]).to receive(:delete).and_return(false) + + expect(coordinator).not_to receive(:perform) + + coordinator.steal('Foo') + end + + it 'does not steal jobs for a different migration' do + expect(coordinator).not_to receive(:perform) + + expect(queue[0]).not_to receive(:delete) + + 
coordinator.steal('Baz') + end + + context 'when a custom predicate is given' do + it 'steals jobs that match the predicate' do + expect(queue[0]).to receive(:delete).and_return(true) + + expect(coordinator).to receive(:perform).with('Foo', [10, 20]) + + coordinator.steal('Foo') { |job| job.args.second.first == 10 && job.args.second.second == 20 } + end + + it 'does not steal jobs that do not match the predicate' do + expect(described_class).not_to receive(:perform) + + expect(queue[0]).not_to receive(:delete) + + coordinator.steal('Foo') { |(arg1, _)| arg1 == 5 } + end + end + end + + context 'when one of the jobs raises an error' do + let(:migration) { spy(:migration) } + + let(:queue) do + [double(args: ['Foo', [10, 20]], klass: worker_class.name), + double(args: ['Foo', [20, 30]], klass: worker_class.name)] + end + + before do + stub_const('Gitlab::BackgroundMigration::Foo', migration) + + allow(queue[0]).to receive(:delete).and_return(true) + allow(queue[1]).to receive(:delete).and_return(true) + end + + it 'enqueues the migration again and re-raises the error' do + allow(migration).to receive(:perform).with(10, 20).and_raise(Exception, 'Migration error').once + + expect(worker_class).to receive(:perform_async).with('Foo', [10, 20]).once + + expect { coordinator.steal('Foo') }.to raise_error(Exception) + end + end + end + + context 'when there are scheduled jobs present', :redis do + it 'steals all jobs from the scheduled sets' do + Sidekiq::Testing.disable! do + worker_class.perform_in(10.minutes, 'Object') + + expect(Sidekiq::ScheduledSet.new).to be_one + expect(coordinator).to receive(:perform).with('Object', any_args) + + coordinator.steal('Object') + + expect(Sidekiq::ScheduledSet.new).to be_none + end + end + end + + context 'when there are enqueued and scheduled jobs present', :redis do + it 'steals from the scheduled sets queue first' do + Sidekiq::Testing.disable! 
do + expect(coordinator).to receive(:perform).with('Object', [1]).ordered + expect(coordinator).to receive(:perform).with('Object', [2]).ordered + + worker_class.perform_async('Object', [2]) + worker_class.perform_in(10.minutes, 'Object', [1]) + + coordinator.steal('Object') + end + end + end + + context 'when retry_dead_jobs is true', :redis do + let(:retry_queue) do + [double(args: ['Object', [3]], klass: worker_class.name, delete: true)] + end + + let(:dead_queue) do + [double(args: ['Object', [4]], klass: worker_class.name, delete: true)] + end + + before do + allow(Sidekiq::RetrySet).to receive(:new).and_return(retry_queue) + allow(Sidekiq::DeadSet).to receive(:new).and_return(dead_queue) + end + + it 'steals from the dead and retry queue' do + Sidekiq::Testing.disable! do + expect(coordinator).to receive(:perform).with('Object', [1]).ordered + expect(coordinator).to receive(:perform).with('Object', [2]).ordered + expect(coordinator).to receive(:perform).with('Object', [3]).ordered + expect(coordinator).to receive(:perform).with('Object', [4]).ordered + + worker_class.perform_async('Object', [2]) + worker_class.perform_in(10.minutes, 'Object', [1]) + + coordinator.steal('Object', retry_dead_jobs: true) + end + end + end + end + + describe '#perform' do + let(:migration) { spy(:migration) } + let(:connection) { double('connection') } + + before do + stub_const('Gitlab::BackgroundMigration::Foo', migration) + + allow(coordinator).to receive(:connection).and_return(connection) + end + + it 'performs a background migration with the configured shared connection' do + expect(coordinator).to receive(:with_shared_connection).and_call_original + + expect(migration).to receive(:perform).with(10, 20).once do + expect(Gitlab::Database::SharedModel.connection).to be(connection) + end + + coordinator.perform('Foo', [10, 20]) + end + end + + describe '.remaining', :redis do + context 'when there are jobs remaining' do + before do + Sidekiq::Testing.disable! 
do + MergeWorker.perform_async('Foo') + MergeWorker.perform_in(10.minutes, 'Foo') + + 5.times do + worker_class.perform_async('Foo') + end + 3.times do + worker_class.perform_in(10.minutes, 'Foo') + end + end + end + + it 'returns the enqueued jobs plus the scheduled jobs' do + expect(coordinator.remaining).to eq(8) + end + end + + context 'when there are no jobs remaining' do + it 'returns zero' do + expect(coordinator.remaining).to be_zero + end + end + end + + describe '.exists?', :redis do + context 'when there are enqueued jobs present' do + before do + Sidekiq::Testing.disable! do + MergeWorker.perform_async('Bar') + worker_class.perform_async('Foo') + end + end + + it 'returns true if specific job exists' do + expect(coordinator.exists?('Foo')).to eq(true) + end + + it 'returns false if specific job does not exist' do + expect(coordinator.exists?('Bar')).to eq(false) + end + end + + context 'when there are scheduled jobs present' do + before do + Sidekiq::Testing.disable! do + MergeWorker.perform_in(10.minutes, 'Bar') + worker_class.perform_in(10.minutes, 'Foo') + end + end + + it 'returns true if specific job exists' do + expect(coordinator.exists?('Foo')).to eq(true) + end + + it 'returns false if specific job does not exist' do + expect(coordinator.exists?('Bar')).to eq(false) + end + end + end + + describe '.dead_jobs?' do + let(:queue) do + [ + double(args: ['Foo', [10, 20]], klass: worker_class.name), + double(args: ['Bar'], klass: 'MergeWorker') + ] + end + + context 'when there are dead jobs present' do + before do + allow(Sidekiq::DeadSet).to receive(:new).and_return(queue) + end + + it 'returns true if specific job exists' do + expect(coordinator.dead_jobs?('Foo')).to eq(true) + end + + it 'returns false if specific job does not exist' do + expect(coordinator.dead_jobs?('Bar')).to eq(false) + end + end + end + + describe '.retrying_jobs?' 
do + let(:queue) do + [ + double(args: ['Foo', [10, 20]], klass: worker_class.name), + double(args: ['Bar'], klass: 'MergeWorker') + ] + end + + context 'when there are dead jobs present' do + before do + allow(Sidekiq::RetrySet).to receive(:new).and_return(queue) + end + + it 'returns true if specific job exists' do + expect(coordinator.retrying_jobs?('Foo')).to eq(true) + end + + it 'returns false if specific job does not exist' do + expect(coordinator.retrying_jobs?('Bar')).to eq(false) + end + end + end +end diff --git a/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb b/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb index b7cf101dd8a..64e8afedf52 100644 --- a/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb +++ b/spec/lib/gitlab/background_migration/link_lfs_objects_projects_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::LinkLfsObjectsProjects, :migration, schema: 2020_03_10_075115 do +RSpec.describe Gitlab::BackgroundMigration::LinkLfsObjectsProjects, :migration, schema: 20181228175414 do let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } let(:fork_networks) { table(:fork_networks) } diff --git a/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb b/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb index c58b2d609e9..4287d6723cf 100644 --- a/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_fingerprint_sha256_within_keys_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::MigrateFingerprintSha256WithinKeys, schema: 20200106071113 do +RSpec.describe Gitlab::BackgroundMigration::MigrateFingerprintSha256WithinKeys, schema: 20181228175414 do subject(:fingerprint_migrator) { described_class.new } let(:key_table) { table(:keys) } diff --git 
a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb b/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb deleted file mode 100644 index f2cd2acd4f3..00000000000 --- a/spec/lib/gitlab/background_migration/migrate_issue_trackers_sensitive_data_spec.rb +++ /dev/null @@ -1,327 +0,0 @@ -# frozen_string_literal: true -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::MigrateIssueTrackersSensitiveData, schema: 20200130145430 do - let(:services) { table(:services) } - - before do - # we need to define the classes due to encryption - issue_tracker_data = Class.new(ApplicationRecord) do - self.table_name = 'issue_tracker_data' - - def self.encryption_options - { - key: Settings.attr_encrypted_db_key_base_32, - encode: true, - mode: :per_attribute_iv, - algorithm: 'aes-256-gcm' - } - end - - attr_encrypted :project_url, encryption_options - attr_encrypted :issues_url, encryption_options - attr_encrypted :new_issue_url, encryption_options - end - - jira_tracker_data = Class.new(ApplicationRecord) do - self.table_name = 'jira_tracker_data' - - def self.encryption_options - { - key: Settings.attr_encrypted_db_key_base_32, - encode: true, - mode: :per_attribute_iv, - algorithm: 'aes-256-gcm' - } - end - - attr_encrypted :url, encryption_options - attr_encrypted :api_url, encryption_options - attr_encrypted :username, encryption_options - attr_encrypted :password, encryption_options - end - - stub_const('IssueTrackerData', issue_tracker_data) - stub_const('JiraTrackerData', jira_tracker_data) - end - - let(:url) { 'http://base-url.tracker.com' } - let(:new_issue_url) { 'http://base-url.tracker.com/new_issue' } - let(:issues_url) { 'http://base-url.tracker.com/issues' } - let(:api_url) { 'http://api.tracker.com' } - let(:password) { 'passw1234' } - let(:username) { 'user9' } - let(:title) { 'Issue tracker' } - let(:description) { 'Issue tracker description' } - - let(:jira_properties) do - { - 
'api_url' => api_url, - 'jira_issue_transition_id' => '5', - 'password' => password, - 'url' => url, - 'username' => username, - 'title' => title, - 'description' => description, - 'other_field' => 'something' - } - end - - let(:tracker_properties) do - { - 'project_url' => url, - 'new_issue_url' => new_issue_url, - 'issues_url' => issues_url, - 'title' => title, - 'description' => description, - 'other_field' => 'something' - } - end - - let(:tracker_properties_no_url) do - { - 'new_issue_url' => new_issue_url, - 'issues_url' => issues_url, - 'title' => title, - 'description' => description - } - end - - subject { described_class.new.perform(1, 100) } - - shared_examples 'handle properties' do - it 'does not clear the properties' do - expect { subject }.not_to change { service.reload.properties} - end - end - - context 'with Jira service' do - let!(:service) do - services.create!(id: 10, type: 'JiraService', title: nil, properties: jira_properties.to_json, category: 'issue_tracker') - end - - it_behaves_like 'handle properties' - - it 'migrates data' do - expect { subject }.to change { JiraTrackerData.count }.by(1) - - service.reload - data = JiraTrackerData.find_by(service_id: service.id) - - expect(data.url).to eq(url) - expect(data.api_url).to eq(api_url) - expect(data.username).to eq(username) - expect(data.password).to eq(password) - expect(service.title).to eq(title) - expect(service.description).to eq(description) - end - end - - context 'with bugzilla service' do - let!(:service) do - services.create!(id: 11, type: 'BugzillaService', title: nil, properties: tracker_properties.to_json, category: 'issue_tracker') - end - - it_behaves_like 'handle properties' - - it 'migrates data' do - expect { subject }.to change { IssueTrackerData.count }.by(1) - - service.reload - data = IssueTrackerData.find_by(service_id: service.id) - - expect(data.project_url).to eq(url) - expect(data.issues_url).to eq(issues_url) - expect(data.new_issue_url).to eq(new_issue_url) - 
expect(service.title).to eq(title) - expect(service.description).to eq(description) - end - end - - context 'with youtrack service' do - let!(:service) do - services.create!(id: 12, type: 'YoutrackService', title: nil, properties: tracker_properties_no_url.to_json, category: 'issue_tracker') - end - - it_behaves_like 'handle properties' - - it 'migrates data' do - expect { subject }.to change { IssueTrackerData.count }.by(1) - - service.reload - data = IssueTrackerData.find_by(service_id: service.id) - - expect(data.project_url).to be_nil - expect(data.issues_url).to eq(issues_url) - expect(data.new_issue_url).to eq(new_issue_url) - expect(service.title).to eq(title) - expect(service.description).to eq(description) - end - end - - context 'with gitlab service with no properties' do - let!(:service) do - services.create!(id: 13, type: 'GitlabIssueTrackerService', title: nil, properties: {}, category: 'issue_tracker') - end - - it_behaves_like 'handle properties' - - it 'does not migrate data' do - expect { subject }.not_to change { IssueTrackerData.count } - end - end - - context 'with redmine service already with data fields' do - let!(:service) do - services.create!(id: 14, type: 'RedmineService', title: nil, properties: tracker_properties_no_url.to_json, category: 'issue_tracker').tap do |service| - IssueTrackerData.create!(service_id: service.id, project_url: url, new_issue_url: new_issue_url, issues_url: issues_url) - end - end - - it_behaves_like 'handle properties' - - it 'does not create new data fields record' do - expect { subject }.not_to change { IssueTrackerData.count } - end - end - - context 'with custom issue tracker which has data fields record inconsistent with properties field' do - let!(:service) do - services.create!(id: 15, type: 'CustomIssueTrackerService', title: 'Existing title', properties: jira_properties.to_json, category: 'issue_tracker').tap do |service| - IssueTrackerData.create!(service_id: service.id, project_url: 'http://other_url', 
new_issue_url: 'http://other_url/new_issue', issues_url: 'http://other_url/issues') - end - end - - it_behaves_like 'handle properties' - - it 'does not update the data fields record' do - expect { subject }.not_to change { IssueTrackerData.count } - - service.reload - data = IssueTrackerData.find_by(service_id: service.id) - - expect(data.project_url).to eq('http://other_url') - expect(data.issues_url).to eq('http://other_url/issues') - expect(data.new_issue_url).to eq('http://other_url/new_issue') - expect(service.title).to eq('Existing title') - end - end - - context 'with Jira service which has data fields record inconsistent with properties field' do - let!(:service) do - services.create!(id: 16, type: 'CustomIssueTrackerService', description: 'Existing description', properties: jira_properties.to_json, category: 'issue_tracker').tap do |service| - JiraTrackerData.create!(service_id: service.id, url: 'http://other_jira_url') - end - end - - it_behaves_like 'handle properties' - - it 'does not update the data fields record' do - expect { subject }.not_to change { JiraTrackerData.count } - - service.reload - data = JiraTrackerData.find_by(service_id: service.id) - - expect(data.url).to eq('http://other_jira_url') - expect(data.password).to be_nil - expect(data.username).to be_nil - expect(data.api_url).to be_nil - expect(service.description).to eq('Existing description') - end - end - - context 'non issue tracker service' do - let!(:service) do - services.create!(id: 17, title: nil, description: nil, type: 'OtherService', properties: tracker_properties.to_json) - end - - it_behaves_like 'handle properties' - - it 'does not migrate any data' do - expect { subject }.not_to change { IssueTrackerData.count } - - service.reload - expect(service.title).to be_nil - expect(service.description).to be_nil - end - end - - context 'Jira service with empty properties' do - let!(:service) do - services.create!(id: 18, type: 'JiraService', properties: '', category: 
'issue_tracker') - end - - it_behaves_like 'handle properties' - - it 'does not migrate any data' do - expect { subject }.not_to change { JiraTrackerData.count } - end - end - - context 'Jira service with nil properties' do - let!(:service) do - services.create!(id: 18, type: 'JiraService', properties: nil, category: 'issue_tracker') - end - - it_behaves_like 'handle properties' - - it 'does not migrate any data' do - expect { subject }.not_to change { JiraTrackerData.count } - end - end - - context 'Jira service with invalid properties' do - let!(:service) do - services.create!(id: 18, type: 'JiraService', properties: 'invalid data', category: 'issue_tracker') - end - - it_behaves_like 'handle properties' - - it 'does not migrate any data' do - expect { subject }.not_to change { JiraTrackerData.count } - end - end - - context 'with Jira service with invalid properties, valid Jira service and valid bugzilla service' do - let!(:jira_integration_invalid) do - services.create!(id: 19, title: 'invalid - title', description: 'invalid - description', type: 'JiraService', properties: 'invalid data', category: 'issue_tracker') - end - - let!(:jira_integration_valid) do - services.create!(id: 20, type: 'JiraService', properties: jira_properties.to_json, category: 'issue_tracker') - end - - let!(:bugzilla_integration_valid) do - services.create!(id: 11, type: 'BugzillaService', title: nil, properties: tracker_properties.to_json, category: 'issue_tracker') - end - - it 'migrates data for the valid service' do - subject - - jira_integration_invalid.reload - expect(JiraTrackerData.find_by(service_id: jira_integration_invalid.id)).to be_nil - expect(jira_integration_invalid.title).to eq('invalid - title') - expect(jira_integration_invalid.description).to eq('invalid - description') - expect(jira_integration_invalid.properties).to eq('invalid data') - - jira_integration_valid.reload - data = JiraTrackerData.find_by(service_id: jira_integration_valid.id) - - expect(data.url).to 
eq(url) - expect(data.api_url).to eq(api_url) - expect(data.username).to eq(username) - expect(data.password).to eq(password) - expect(jira_integration_valid.title).to eq(title) - expect(jira_integration_valid.description).to eq(description) - - bugzilla_integration_valid.reload - data = IssueTrackerData.find_by(service_id: bugzilla_integration_valid.id) - - expect(data.project_url).to eq(url) - expect(data.issues_url).to eq(issues_url) - expect(data.new_issue_url).to eq(new_issue_url) - expect(bugzilla_integration_valid.title).to eq(title) - expect(bugzilla_integration_valid.description).to eq(description) - end - end -end diff --git a/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb index 91e8dcdf880..31b6ee0c7cd 100644 --- a/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_merge_request_diff_commit_users_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers do +RSpec.describe Gitlab::BackgroundMigration::MigrateMergeRequestDiffCommitUsers, schema: 20211012134316 do let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } let(:users) { table(:users) } diff --git a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb index 9eda51f6ec4..ab183d01357 100644 --- a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb +++ b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' require 'webauthn/u2f_migrator' -RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20200925125321 do +RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20181228175414 do let(:users) { 
table(:users) } let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) } diff --git a/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb b/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb deleted file mode 100644 index d90a5d30954..00000000000 --- a/spec/lib/gitlab/background_migration/migrate_users_bio_to_user_details_spec.rb +++ /dev/null @@ -1,85 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::MigrateUsersBioToUserDetails, :migration, schema: 20200323074147 do - let(:users) { table(:users) } - - let(:user_details) do - klass = table(:user_details) - klass.primary_key = :user_id - klass - end - - let!(:user_needs_migration) { users.create!(name: 'user1', email: 'test1@test.com', projects_limit: 1, bio: 'bio') } - let!(:user_needs_no_migration) { users.create!(name: 'user2', email: 'test2@test.com', projects_limit: 1) } - let!(:user_also_needs_no_migration) { users.create!(name: 'user3', email: 'test3@test.com', projects_limit: 1, bio: '') } - let!(:user_with_long_bio) { users.create!(name: 'user4', email: 'test4@test.com', projects_limit: 1, bio: 'a' * 256) } # 255 is the max - - let!(:user_already_has_details) { users.create!(name: 'user5', email: 'test5@test.com', projects_limit: 1, bio: 'my bio') } - let!(:existing_user_details) { user_details.find_or_create_by!(user_id: user_already_has_details.id).update!(bio: 'my bio') } - - # unlikely scenario since we have triggers - let!(:user_has_different_details) { users.create!(name: 'user6', email: 'test6@test.com', projects_limit: 1, bio: 'different') } - let!(:different_existing_user_details) { user_details.find_or_create_by!(user_id: user_has_different_details.id).update!(bio: 'bio') } - - let(:user_ids) do - [ - user_needs_migration, - user_needs_no_migration, - user_also_needs_no_migration, - user_with_long_bio, - user_already_has_details, - 
user_has_different_details - ].map(&:id) - end - - subject { described_class.new.perform(user_ids.min, user_ids.max) } - - it 'migrates all relevant records' do - subject - - all_user_details = user_details.all - expect(all_user_details.size).to eq(4) - end - - it 'migrates `bio`' do - subject - - user_detail = user_details.find_by!(user_id: user_needs_migration.id) - - expect(user_detail.bio).to eq('bio') - end - - it 'migrates long `bio`' do - subject - - user_detail = user_details.find_by!(user_id: user_with_long_bio.id) - - expect(user_detail.bio).to eq('a' * 255) - end - - it 'does not change existing user detail' do - expect { subject }.not_to change { user_details.find_by!(user_id: user_already_has_details.id).attributes } - end - - it 'changes existing user detail when the columns are different' do - expect { subject }.to change { user_details.find_by!(user_id: user_has_different_details.id).bio }.from('bio').to('different') - end - - it 'does not migrate record' do - subject - - user_detail = user_details.find_by(user_id: user_needs_no_migration.id) - - expect(user_detail).to be_nil - end - - it 'does not migrate empty bio' do - subject - - user_detail = user_details.find_by(user_id: user_also_needs_no_migration.id) - - expect(user_detail).to be_nil - end -end diff --git a/spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb b/spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb index 36000dc3ffd..944ee98ed4a 100644 --- a/spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_canonical_emails_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::PopulateCanonicalEmails, :migration, schema: 20200312053852 do +RSpec.describe Gitlab::BackgroundMigration::PopulateCanonicalEmails, :migration, schema: 20181228175414 do let(:migration) { described_class.new } let_it_be(:users_table) { table(:users) } diff --git 
a/spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb index bc55f240a58..dc8c8c75b83 100644 --- a/spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_dismissed_state_for_vulnerabilities_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe ::Gitlab::BackgroundMigration::PopulateDismissedStateForVulnerabilities, schema: 2020_11_30_103926 do +RSpec.describe ::Gitlab::BackgroundMigration::PopulateDismissedStateForVulnerabilities, schema: 20181228175414 do let(:users) { table(:users) } let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } diff --git a/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb b/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb index 07b1d99d333..25006e663ab 100644 --- a/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_finding_uuid_for_vulnerability_feedback_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback, schema: 20201211090634 do +RSpec.describe Gitlab::BackgroundMigration::PopulateFindingUuidForVulnerabilityFeedback, schema: 20181228175414 do let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } let(:users) { table(:users) } diff --git a/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb index c6385340ca3..6722321d5f7 100644 --- a/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_has_vulnerabilities_spec.rb @@ -2,7 +2,7 @@ require 
'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::PopulateHasVulnerabilities, schema: 20201103192526 do +RSpec.describe Gitlab::BackgroundMigration::PopulateHasVulnerabilities, schema: 20181228175414 do let(:users) { table(:users) } let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } diff --git a/spec/lib/gitlab/background_migration/populate_issue_email_participants_spec.rb b/spec/lib/gitlab/background_migration/populate_issue_email_participants_spec.rb index f724b007e01..a03a11489b5 100644 --- a/spec/lib/gitlab/background_migration/populate_issue_email_participants_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_issue_email_participants_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::PopulateIssueEmailParticipants, schema: 20201128210234 do +RSpec.describe Gitlab::BackgroundMigration::PopulateIssueEmailParticipants, schema: 20181228175414 do let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') } let!(:project) { table(:projects).create!(id: 1, namespace_id: namespace.id) } let!(:issue1) { table(:issues).create!(id: 1, project_id: project.id, service_desk_reply_to: "a@gitlab.com") } diff --git a/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb b/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb index 44c5f3d1381..1c987d3876f 100644 --- a/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_missing_vulnerability_dismissal_information_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation, schema: 20201028160832 do +RSpec.describe Gitlab::BackgroundMigration::PopulateMissingVulnerabilityDismissalInformation, schema: 20181228175414 do let(:users) { table(:users) } 
let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } diff --git a/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb index e746451b1b9..f9628849dbf 100644 --- a/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_personal_snippet_statistics_spec.rb @@ -111,11 +111,11 @@ RSpec.describe Gitlab::BackgroundMigration::PopulatePersonalSnippetStatistics do if with_repo allow(snippet).to receive(:disk_path).and_return(disk_path(snippet)) + raw_repository(snippet).create_repository + TestEnv.copy_repo(snippet, bare_repo: TestEnv.factory_repo_path_bare, refs: TestEnv::BRANCH_SHA) - - raw_repository(snippet).create_repository end end end diff --git a/spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb b/spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb index 897f5e81372..7884e0d97c0 100644 --- a/spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb +++ b/spec/lib/gitlab/background_migration/populate_project_snippet_statistics_spec.rb @@ -183,11 +183,11 @@ RSpec.describe Gitlab::BackgroundMigration::PopulateProjectSnippetStatistics do if with_repo allow(snippet).to receive(:disk_path).and_return(disk_path(snippet)) + raw_repository(snippet).create_repository + TestEnv.copy_repo(snippet, bare_repo: TestEnv.factory_repo_path_bare, refs: TestEnv::BRANCH_SHA) - - raw_repository(snippet).create_repository end end end diff --git a/spec/lib/gitlab/background_migration/populate_user_highest_roles_table_spec.rb b/spec/lib/gitlab/background_migration/populate_user_highest_roles_table_spec.rb deleted file mode 100644 index b3cacc60cdc..00000000000 --- a/spec/lib/gitlab/background_migration/populate_user_highest_roles_table_spec.rb +++ /dev/null @@ -1,71 +0,0 @@ -# frozen_string_literal: 
true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::PopulateUserHighestRolesTable, schema: 20200311130802 do - let(:members) { table(:members) } - let(:users) { table(:users) } - let(:user_highest_roles) { table(:user_highest_roles) } - - def create_user(id, params = {}) - user_params = { - id: id, - state: 'active', - user_type: nil, - bot_type: nil, - ghost: nil, - email: "user#{id}@example.com", - projects_limit: 0 - }.merge(params) - - users.create!(user_params) - end - - def create_member(id, access_level, params = {}) - params = { - user_id: id, - access_level: access_level, - source_id: 1, - source_type: 'Group', - notification_level: 0 - }.merge(params) - - members.create!(params) - end - - before do - create_user(1) - create_user(2, state: 'blocked') - create_user(3, user_type: 2) - create_user(4) - create_user(5, bot_type: 1) - create_user(6, ghost: true) - create_user(7, ghost: false) - create_user(8) - - create_member(1, 40) - create_member(7, 30) - create_member(8, 20, requested_at: Time.current) - - user_highest_roles.create!(user_id: 1, highest_access_level: 50) - end - - describe '#perform' do - it 'creates user_highest_roles rows according to users', :aggregate_failures do - expect { subject.perform(1, 8) }.to change(UserHighestRole, :count).from(1).to(4) - - created_or_updated_rows = [ - { 'user_id' => 1, 'highest_access_level' => 40 }, - { 'user_id' => 4, 'highest_access_level' => nil }, - { 'user_id' => 7, 'highest_access_level' => 30 }, - { 'user_id' => 8, 'highest_access_level' => nil } - ] - - rows = user_highest_roles.order(:user_id).map do |row| - row.attributes.slice('user_id', 'highest_access_level') - end - - expect(rows).to match_array(created_or_updated_rows) - end - end -end diff --git a/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb b/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb new file mode 100644 index 
00000000000..24259b06469 --- /dev/null +++ b/spec/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces_spec.rb @@ -0,0 +1,254 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::BackgroundMigration::ProjectNamespaces::BackfillProjectNamespaces, :migration do + include MigrationsHelpers + + context 'when migrating data', :aggregate_failures do + let(:projects) { table(:projects) } + let(:namespaces) { table(:namespaces) } + + let(:parent_group1) { namespaces.create!(name: 'parent_group1', path: 'parent_group1', visibility_level: 20, type: 'Group') } + let(:parent_group2) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') } + + let(:parent_group1_project) { projects.create!(name: 'parent_group1_project', path: 'parent_group1_project', namespace_id: parent_group1.id, visibility_level: 20) } + let(:parent_group2_project) { projects.create!(name: 'parent_group2_project', path: 'parent_group2_project', namespace_id: parent_group2.id, visibility_level: 20) } + + let(:child_nodes_count) { 2 } + let(:tree_depth) { 3 } + + let(:backfilled_namespace) { nil } + + before do + BackfillProjectNamespaces::TreeGenerator.new(namespaces, projects, [parent_group1, parent_group2], child_nodes_count, tree_depth).build_tree + end + + describe '#up' do + shared_examples 'back-fill project namespaces' do + it 'back-fills all project namespaces' do + start_id = ::Project.minimum(:id) + end_id = ::Project.maximum(:id) + projects_count = ::Project.count + batches_count = (projects_count / described_class::BATCH_SIZE.to_f).ceil + project_namespaces_count = ::Namespace.where(type: 'Project').count + migration = described_class.new + + expect(projects_count).not_to eq(project_namespaces_count) + expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original + expect(migration).to 
receive(:batch_update_projects).exactly(batches_count).and_call_original + expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original + + expect { migration.perform(start_id, end_id, nil, 'up') }.to change(Namespace.where(type: 'Project'), :count) + + expect(projects_count).to eq(::Namespace.where(type: 'Project').count) + check_projects_in_sync_with(Namespace.where(type: 'Project')) + end + + context 'when passing specific group as parameter' do + let(:backfilled_namespace) { parent_group1 } + + it 'back-fills project namespaces for the specified group hierarchy' do + backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects + start_id = backfilled_namespace_projects.minimum(:id) + end_id = backfilled_namespace_projects.maximum(:id) + group_projects_count = backfilled_namespace_projects.count + batches_count = (group_projects_count / described_class::BATCH_SIZE.to_f).ceil + project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace)) + + migration = described_class.new + + expect(project_namespaces_in_hierarchy.count).to eq(0) + expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original + expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original + expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original + + expect(group_projects_count).to eq(14) + expect(project_namespaces_in_hierarchy.count).to eq(0) + + migration.perform(start_id, end_id, backfilled_namespace.id, 'up') + + expect(project_namespaces_in_hierarchy.count).to eq(14) + check_projects_in_sync_with(project_namespaces_in_hierarchy) + end + end + + context 'when projects already have project namespaces' do + before do + hierarchy1_projects = base_ancestor(parent_group1).first.all_projects + start_id = hierarchy1_projects.minimum(:id) + end_id = 
hierarchy1_projects.maximum(:id) + + described_class.new.perform(start_id, end_id, parent_group1.id, 'up') + end + + it 'does not duplicate project namespaces' do + # check there are already some project namespaces but not for all + projects_count = ::Project.count + start_id = ::Project.minimum(:id) + end_id = ::Project.maximum(:id) + batches_count = (projects_count / described_class::BATCH_SIZE.to_f).ceil + project_namespaces = ::Namespace.where(type: 'Project') + migration = described_class.new + + expect(project_namespaces_in_hierarchy(base_ancestor(parent_group1)).count).to be >= 14 + expect(project_namespaces_in_hierarchy(base_ancestor(parent_group2)).count).to eq(0) + expect(projects_count).not_to eq(project_namespaces.count) + + # run migration again to test we do not generate extra project namespaces + expect(migration).to receive(:batch_insert_namespaces).exactly(batches_count).and_call_original + expect(migration).to receive(:batch_update_projects).exactly(batches_count).and_call_original + expect(migration).to receive(:batch_update_project_namespaces_traversal_ids).exactly(batches_count).and_call_original + + expect { migration.perform(start_id, end_id, nil, 'up') }.to change(project_namespaces, :count).by(14) + + expect(projects_count).to eq(project_namespaces.count) + end + end + end + + it 'checks no project namespaces exist in the defined hierarchies' do + hierarchy1_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group1)) + hierarchy2_project_namespaces = project_namespaces_in_hierarchy(base_ancestor(parent_group2)) + hierarchy1_projects_count = base_ancestor(parent_group1).first.all_projects.count + hierarchy2_projects_count = base_ancestor(parent_group2).first.all_projects.count + + expect(hierarchy1_project_namespaces).to be_empty + expect(hierarchy2_project_namespaces).to be_empty + expect(hierarchy1_projects_count).to eq(14) + expect(hierarchy2_projects_count).to eq(14) + end + + context 'back-fill project namespaces 
in a single batch' do + it_behaves_like 'back-fill project namespaces' + end + + context 'back-fill project namespaces in batches' do + before do + stub_const("#{described_class.name}::BATCH_SIZE", 2) + end + + it_behaves_like 'back-fill project namespaces' + end + end + + describe '#down' do + before do + start_id = ::Project.minimum(:id) + end_id = ::Project.maximum(:id) + # back-fill first + described_class.new.perform(start_id, end_id, nil, 'up') + end + + shared_examples 'cleanup project namespaces' do + it 'removes project namespaces' do + projects_count = ::Project.count + start_id = ::Project.minimum(:id) + end_id = ::Project.maximum(:id) + migration = described_class.new + batches_count = (projects_count / described_class::BATCH_SIZE.to_f).ceil + + expect(projects_count).to be > 0 + expect(projects_count).to eq(::Namespace.where(type: 'Project').count) + + expect(migration).to receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original + expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original + + migration.perform(start_id, end_id, nil, 'down') + + expect(::Project.count).to be > 0 + expect(::Namespace.where(type: 'Project').count).to eq(0) + end + + context 'when passing specific group as parameter' do + let(:backfilled_namespace) { parent_group1 } + + it 'removes project namespaces only for the specific group hierarchy' do + backfilled_namespace_projects = base_ancestor(backfilled_namespace).first.all_projects + start_id = backfilled_namespace_projects.minimum(:id) + end_id = backfilled_namespace_projects.maximum(:id) + group_projects_count = backfilled_namespace_projects.count + batches_count = (group_projects_count / described_class::BATCH_SIZE.to_f).ceil + project_namespaces_in_hierarchy = project_namespaces_in_hierarchy(base_ancestor(backfilled_namespace)) + migration = described_class.new + + expect(project_namespaces_in_hierarchy.count).to eq(14) + expect(migration).to 
receive(:nullify_project_namespaces_in_projects).exactly(batches_count).and_call_original + expect(migration).to receive(:delete_project_namespace_records).exactly(batches_count).and_call_original + + migration.perform(start_id, end_id, backfilled_namespace.id, 'down') + + expect(::Namespace.where(type: 'Project').count).to be > 0 + expect(project_namespaces_in_hierarchy.count).to eq(0) + end + end + end + + context 'cleanup project namespaces in a single batch' do + it_behaves_like 'cleanup project namespaces' + end + + context 'cleanup project namespaces in batches' do + before do + stub_const("#{described_class.name}::BATCH_SIZE", 2) + end + + it_behaves_like 'cleanup project namespaces' + end + end + end + + def base_ancestor(ancestor) + ::Namespace.where(id: ancestor.id) + end + + def project_namespaces_in_hierarchy(base_node) + Gitlab::ObjectHierarchy.new(base_node).base_and_descendants.where(type: 'Project') + end + + def check_projects_in_sync_with(namespaces) + project_namespaces_attrs = namespaces.order(:id).pluck(:id, :name, :path, :parent_id, :visibility_level, :shared_runners_enabled) + corresponding_projects_attrs = Project.where(project_namespace_id: project_namespaces_attrs.map(&:first)) + .order(:project_namespace_id).pluck(:project_namespace_id, :name, :path, :namespace_id, :visibility_level, :shared_runners_enabled) + + expect(project_namespaces_attrs).to eq(corresponding_projects_attrs) + end +end + +module BackfillProjectNamespaces + class TreeGenerator + def initialize(namespaces, projects, parent_nodes, child_nodes_count, tree_depth) + parent_nodes_ids = parent_nodes.map(&:id) + + @namespaces = namespaces + @projects = projects + @subgroups_depth = tree_depth + @resource_count = child_nodes_count + @all_groups = [parent_nodes_ids] + end + + def build_tree + (1..@subgroups_depth).each do |level| + parent_level = level - 1 + current_level = level + parent_groups = @all_groups[parent_level] + + parent_groups.each do |parent_id| + 
@resource_count.times do |i| + group_path = "child#{i}_level#{level}" + project_path = "project#{i}_level#{level}" + sub_group = @namespaces.create!(name: group_path, path: group_path, parent_id: parent_id, visibility_level: 20, type: 'Group') + @projects.create!(name: project_path, path: project_path, namespace_id: sub_group.id, visibility_level: 20) + + track_group_id(current_level, sub_group.id) + end + end + end + end + + def track_group_id(depth_level, group_id) + @all_groups[depth_level] ||= [] + @all_groups[depth_level] << group_id + end + end +end diff --git a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb b/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb index c1ba1607b89..1830a7fc099 100644 --- a/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb +++ b/spec/lib/gitlab/background_migration/recalculate_project_authorizations_with_min_max_user_id_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizationsWithMinMaxUserId, schema: 20200204113224 do +RSpec.describe Gitlab::BackgroundMigration::RecalculateProjectAuthorizationsWithMinMaxUserId, schema: 20181228175414 do let(:users_table) { table(:users) } let(:min) { 1 } let(:max) { 5 } diff --git a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb index 30908145782..4cdb56d3d3b 100644 --- a/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb +++ b/spec/lib/gitlab/background_migration/recalculate_vulnerabilities_occurrences_uuid_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid, schema: 20201110110454 do +RSpec.describe 
Gitlab::BackgroundMigration::RecalculateVulnerabilitiesOccurrencesUuid, schema: 20181228175414 do let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } let(:users) { table(:users) } let(:user) { create_user! } diff --git a/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb b/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb index 391b27b28e6..afcdaaf1cb8 100644 --- a/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb +++ b/spec/lib/gitlab/background_migration/remove_duplicate_services_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateServices, :migration, schema: 20201207165956 do +RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateServices, :migration, schema: 20181228175414 do let_it_be(:users) { table(:users) } let_it_be(:namespaces) { table(:namespaces) } let_it_be(:projects) { table(:projects) } diff --git a/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb b/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb index 47e1d4620cd..7214225c32c 100644 --- a/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb +++ b/spec/lib/gitlab/background_migration/remove_duplicate_vulnerabilities_findings_spec.rb @@ -5,9 +5,9 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindin let(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') } let(:users) { table(:users) } let(:user) { create_user! 
} - let(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) } + let(:project) { table(:projects).create!(id: 14219619, namespace_id: namespace.id) } let(:scanners) { table(:vulnerability_scanners) } - let!(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } + let!(:scanner1) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') } let!(:scanner2) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') } let!(:scanner3) { scanners.create!(project_id: project.id, external_id: 'test 3', name: 'test scanner 3') } let!(:unrelated_scanner) { scanners.create!(project_id: project.id, external_id: 'unreleated_scanner', name: 'unrelated scanner') } @@ -16,43 +16,68 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindin let(:vulnerability_identifiers) { table(:vulnerability_identifiers) } let(:vulnerability_identifier) do vulnerability_identifiers.create!( + id: 1244459, project_id: project.id, external_type: 'vulnerability-identifier', external_id: 'vulnerability-identifier', - fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a', + fingerprint: '0a203e8cd5260a1948edbedc76c7cb91ad6a2e45', name: 'vulnerability identifier') end - let!(:first_finding) do + let!(:vulnerability_for_first_duplicate) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let!(:first_finding_duplicate) do create_finding!( - uuid: "test1", - vulnerability_id: nil, + id: 5606961, + uuid: "bd95c085-71aa-51d7-9bb6-08ae669c262e", + vulnerability_id: vulnerability_for_first_duplicate.id, report_type: 0, - location_fingerprint: '2bda3014914481791847d8eca38d1a8d13b6ad76', + location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', primary_identifier_id: vulnerability_identifier.id, - scanner_id: scanner.id, + scanner_id: scanner1.id, project_id: project.id ) end - let!(:first_duplicate) do + 
let!(:vulnerability_for_second_duplicate) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let!(:second_finding_duplicate) do create_finding!( - uuid: "test2", - vulnerability_id: nil, + id: 8765432, + uuid: "5b714f58-1176-5b26-8fd5-e11dfcb031b5", + vulnerability_id: vulnerability_for_second_duplicate.id, report_type: 0, - location_fingerprint: '2bda3014914481791847d8eca38d1a8d13b6ad76', + location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', primary_identifier_id: vulnerability_identifier.id, scanner_id: scanner2.id, project_id: project.id ) end - let!(:second_duplicate) do + let!(:vulnerability_for_third_duplicate) do + create_vulnerability!( + project_id: project.id, + author_id: user.id + ) + end + + let!(:third_finding_duplicate) do create_finding!( - uuid: "test3", - vulnerability_id: nil, + id: 8832995, + uuid: "cfe435fa-b25b-5199-a56d-7b007cc9e2d4", + vulnerability_id: vulnerability_for_third_duplicate.id, report_type: 0, - location_fingerprint: '2bda3014914481791847d8eca38d1a8d13b6ad76', + location_fingerprint: '00049d5119c2cb3bfb3d1ee1f6e031fe925aed75', primary_identifier_id: vulnerability_identifier.id, scanner_id: scanner3.id, project_id: project.id @@ -61,6 +86,7 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindin let!(:unrelated_finding) do create_finding!( + id: 9999999, uuid: "unreleated_finding", vulnerability_id: nil, report_type: 1, @@ -71,7 +97,7 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindin ) end - subject { described_class.new.perform(first_finding.id, unrelated_finding.id) } + subject { described_class.new.perform(first_finding_duplicate.id, unrelated_finding.id) } before do stub_const("#{described_class}::DELETE_BATCH_SIZE", 1) @@ -82,7 +108,15 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindin expect { subject }.to change { vulnerability_findings.count }.from(4).to(2) - 
expect(vulnerability_findings.pluck(:id)).to eq([second_duplicate.id, unrelated_finding.id]) + expect(vulnerability_findings.pluck(:id)).to match_array([third_finding_duplicate.id, unrelated_finding.id]) + end + + it "removes vulnerabilites without findings" do + expect(vulnerabilities.count).to eq(3) + + expect { subject }.to change { vulnerabilities.count }.from(3).to(1) + + expect(vulnerabilities.pluck(:id)).to match_array([vulnerability_for_third_duplicate.id]) end private @@ -100,11 +134,12 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindin # rubocop:disable Metrics/ParameterLists def create_finding!( + id: nil, vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:, name: "test", severity: 7, confidence: 7, report_type: 0, project_fingerprint: '123qweasdzxc', location_fingerprint: 'test', metadata_version: 'test', raw_metadata: 'test', uuid: 'test') - vulnerability_findings.create!( + params = { vulnerability_id: vulnerability_id, project_id: project_id, name: name, @@ -118,7 +153,9 @@ RSpec.describe Gitlab::BackgroundMigration::RemoveDuplicateVulnerabilitiesFindin metadata_version: metadata_version, raw_metadata: raw_metadata, uuid: uuid - ) + } + params[:id] = id unless id.nil? 
+ vulnerability_findings.create!(params) end # rubocop:enable Metrics/ParameterLists diff --git a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb b/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb index 561a602fab9..6cfdbb5a14e 100644 --- a/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb +++ b/spec/lib/gitlab/background_migration/replace_blocked_by_links_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::ReplaceBlockedByLinks, schema: 20201015073808 do +RSpec.describe Gitlab::BackgroundMigration::ReplaceBlockedByLinks, schema: 20181228175414 do let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') } let(:project) { table(:projects).create!(namespace_id: namespace.id, name: 'gitlab') } let(:issue1) { table(:issues).create!(project_id: project.id, title: 'a') } diff --git a/spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb b/spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb index 68aa64a1c7d..ef90b5674f0 100644 --- a/spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb +++ b/spec/lib/gitlab/background_migration/reset_shared_runners_for_transferred_projects_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::ResetSharedRunnersForTransferredProjects, schema: 20201110161542 do +RSpec.describe Gitlab::BackgroundMigration::ResetSharedRunnersForTransferredProjects, schema: 20181228175414 do let(:namespaces) { table(:namespaces) } let(:projects) { table(:projects) } diff --git a/spec/lib/gitlab/background_migration/set_default_iteration_cadences_spec.rb b/spec/lib/gitlab/background_migration/set_default_iteration_cadences_spec.rb deleted file mode 100644 index 46c919f0854..00000000000 --- a/spec/lib/gitlab/background_migration/set_default_iteration_cadences_spec.rb +++ /dev/null @@ -1,80 
+0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::BackgroundMigration::SetDefaultIterationCadences, schema: 20201231133921 do - let(:namespaces) { table(:namespaces) } - let(:iterations) { table(:sprints) } - let(:iterations_cadences) { table(:iterations_cadences) } - - describe '#perform' do - context 'when no iteration cadences exists' do - let!(:group_1) { namespaces.create!(name: 'group 1', path: 'group-1') } - let!(:group_2) { namespaces.create!(name: 'group 2', path: 'group-2') } - let!(:group_3) { namespaces.create!(name: 'group 3', path: 'group-3') } - - let!(:iteration_1) { iterations.create!(group_id: group_1.id, iid: 1, title: 'Iteration 1', start_date: 10.days.ago, due_date: 8.days.ago) } - let!(:iteration_2) { iterations.create!(group_id: group_3.id, iid: 1, title: 'Iteration 2', start_date: 10.days.ago, due_date: 8.days.ago) } - let!(:iteration_3) { iterations.create!(group_id: group_3.id, iid: 1, title: 'Iteration 3', start_date: 5.days.ago, due_date: 2.days.ago) } - - subject { described_class.new.perform(group_1.id, group_2.id, group_3.id, namespaces.last.id + 1) } - - before do - subject - end - - it 'creates iterations_cadence records for the requested groups' do - expect(iterations_cadences.count).to eq(2) - end - - it 'assigns the iteration cadences to the iterations correctly' do - iterations_cadence = iterations_cadences.find_by(group_id: group_1.id) - iteration_records = iterations.where(iterations_cadence_id: iterations_cadence.id) - - expect(iterations_cadence.start_date).to eq(iteration_1.start_date) - expect(iterations_cadence.last_run_date).to eq(iteration_1.start_date) - expect(iterations_cadence.title).to eq('group 1 Iterations') - expect(iteration_records.size).to eq(1) - expect(iteration_records.first.id).to eq(iteration_1.id) - - iterations_cadence = iterations_cadences.find_by(group_id: group_3.id) - iteration_records = iterations.where(iterations_cadence_id: iterations_cadence.id) - - 
expect(iterations_cadence.start_date).to eq(iteration_3.start_date) - expect(iterations_cadence.last_run_date).to eq(iteration_3.start_date) - expect(iterations_cadence.title).to eq('group 3 Iterations') - expect(iteration_records.size).to eq(2) - expect(iteration_records.first.id).to eq(iteration_2.id) - expect(iteration_records.second.id).to eq(iteration_3.id) - end - - it 'does not call Group class' do - expect(::Group).not_to receive(:where) - - subject - end - end - - context 'when an iteration cadence exists for a group' do - let!(:group) { namespaces.create!(name: 'group', path: 'group') } - - let!(:iterations_cadence_1) { iterations_cadences.create!(group_id: group.id, start_date: 2.days.ago, title: 'Cadence 1') } - - let!(:iteration_1) { iterations.create!(group_id: group.id, iid: 1, title: 'Iteration 1', start_date: 10.days.ago, due_date: 8.days.ago) } - let!(:iteration_2) { iterations.create!(group_id: group.id, iterations_cadence_id: iterations_cadence_1.id, iid: 2, title: 'Iteration 2', start_date: 5.days.ago, due_date: 3.days.ago) } - - subject { described_class.new.perform(group.id) } - - it 'does not create a new iterations_cadence' do - expect { subject }.not_to change { iterations_cadences.count } - end - - it 'assigns iteration cadences to iterations if needed' do - subject - - expect(iteration_1.reload.iterations_cadence_id).to eq(iterations_cadence_1.id) - expect(iteration_2.reload.iterations_cadence_id).to eq(iterations_cadence_1.id) - end - end - end -end diff --git a/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb b/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb index f23518625e4..1fdbdf25706 100644 --- a/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb +++ b/spec/lib/gitlab/background_migration/set_merge_request_diff_files_count_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe 
Gitlab::BackgroundMigration::SetMergeRequestDiffFilesCount, schema: 20200807152315 do +RSpec.describe Gitlab::BackgroundMigration::SetMergeRequestDiffFilesCount, schema: 20181228175414 do let(:merge_request_diff_files) { table(:merge_request_diff_files) } let(:merge_request_diffs) { table(:merge_request_diffs) } let(:merge_requests) { table(:merge_requests) } diff --git a/spec/lib/gitlab/background_migration/set_null_external_diff_store_to_local_value_spec.rb b/spec/lib/gitlab/background_migration/set_null_external_diff_store_to_local_value_spec.rb deleted file mode 100644 index 6079ad2dd2a..00000000000 --- a/spec/lib/gitlab/background_migration/set_null_external_diff_store_to_local_value_spec.rb +++ /dev/null @@ -1,33 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -# The test setup must begin before -# 20200804041930_add_not_null_constraint_on_external_diff_store_to_merge_request_diffs.rb -# has run, or else we cannot insert a row with `NULL` `external_diff_store` to -# test against. 
-RSpec.describe Gitlab::BackgroundMigration::SetNullExternalDiffStoreToLocalValue, schema: 20200804035230 do - let!(:merge_request_diffs) { table(:merge_request_diffs) } - let!(:merge_requests) { table(:merge_requests) } - let!(:namespaces) { table(:namespaces) } - let!(:projects) { table(:projects) } - let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } - let!(:project) { projects.create!(namespace_id: namespace.id) } - let!(:merge_request) { merge_requests.create!(source_branch: 'x', target_branch: 'master', target_project_id: project.id) } - - it 'correctly migrates nil external_diff_store to 1' do - external_diff_store_1 = merge_request_diffs.create!(external_diff_store: 1, merge_request_id: merge_request.id) - external_diff_store_2 = merge_request_diffs.create!(external_diff_store: 2, merge_request_id: merge_request.id) - external_diff_store_nil = merge_request_diffs.create!(external_diff_store: nil, merge_request_id: merge_request.id) - - described_class.new.perform(external_diff_store_1.id, external_diff_store_nil.id) - - external_diff_store_1.reload - external_diff_store_2.reload - external_diff_store_nil.reload - - expect(external_diff_store_1.external_diff_store).to eq(1) # unchanged - expect(external_diff_store_2.external_diff_store).to eq(2) # unchanged - expect(external_diff_store_nil.external_diff_store).to eq(1) # nil => 1 - end -end diff --git a/spec/lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value_spec.rb b/spec/lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value_spec.rb deleted file mode 100644 index 40d41262fc7..00000000000 --- a/spec/lib/gitlab/background_migration/set_null_package_files_file_store_to_local_value_spec.rb +++ /dev/null @@ -1,33 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -# The test setup must begin before -# 20200806004742_add_not_null_constraint_on_file_store_to_package_files.rb -# has run, or else we cannot insert a row with `NULL` 
`file_store` to -# test against. -RSpec.describe Gitlab::BackgroundMigration::SetNullPackageFilesFileStoreToLocalValue, schema: 20200806004232 do - let!(:packages_package_files) { table(:packages_package_files) } - let!(:packages_packages) { table(:packages_packages) } - let!(:projects) { table(:projects) } - let!(:namespaces) { table(:namespaces) } - let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') } - let!(:project) { projects.create!(namespace_id: namespace.id) } - let!(:package) { packages_packages.create!(project_id: project.id, name: 'bar', package_type: 1) } - - it 'correctly migrates nil file_store to 1' do - file_store_1 = packages_package_files.create!(file_store: 1, file_name: 'foo_1', file: 'foo_1', package_id: package.id) - file_store_2 = packages_package_files.create!(file_store: 2, file_name: 'foo_2', file: 'foo_2', package_id: package.id) - file_store_nil = packages_package_files.create!(file_store: nil, file_name: 'foo_nil', file: 'foo_nil', package_id: package.id) - - described_class.new.perform(file_store_1.id, file_store_nil.id) - - file_store_1.reload - file_store_2.reload - file_store_nil.reload - - expect(file_store_1.file_store).to eq(1) # unchanged - expect(file_store_2.file_store).to eq(2) # unchanged - expect(file_store_nil.file_store).to eq(1) # nil => 1 - end -end diff --git a/spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb b/spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb index f2fb2ab6b6e..841a7f306d7 100644 --- a/spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb +++ b/spec/lib/gitlab/background_migration/steal_migrate_merge_request_diff_commit_users_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::StealMigrateMergeRequestDiffCommitUsers do +RSpec.describe Gitlab::BackgroundMigration::StealMigrateMergeRequestDiffCommitUsers, schema: 20211012134316 do 
let(:migration) { described_class.new } describe '#perform' do diff --git a/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb b/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb index 6c0a1d3a5b0..de9799c3642 100644 --- a/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb +++ b/spec/lib/gitlab/background_migration/update_existing_subgroup_to_match_visibility_level_of_parent_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::UpdateExistingSubgroupToMatchVisibilityLevelOfParent, schema: 2020_01_10_121314 do +RSpec.describe Gitlab::BackgroundMigration::UpdateExistingSubgroupToMatchVisibilityLevelOfParent, schema: 20181228175414 do include MigrationHelpers::NamespacesHelpers context 'private visibility level' do diff --git a/spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb b/spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb index bebb398413b..33f5e38100e 100644 --- a/spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb +++ b/spec/lib/gitlab/background_migration/update_existing_users_that_require_two_factor_auth_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::UpdateExistingUsersThatRequireTwoFactorAuth, schema: 20201030121314 do +RSpec.describe Gitlab::BackgroundMigration::UpdateExistingUsersThatRequireTwoFactorAuth, schema: 20181228175414 do include MigrationHelpers::NamespacesHelpers let(:group_with_2fa_parent) { create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE) } diff --git a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb index 
2dae4a65eeb..7af11ffa1e0 100644 --- a/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb +++ b/spec/lib/gitlab/background_migration/user_mentions/create_resource_user_mention_spec.rb @@ -1,120 +1,8 @@ # frozen_string_literal: true require 'spec_helper' -require './db/post_migrate/20200128134110_migrate_commit_notes_mentions_to_db' -require './db/post_migrate/20200211155539_migrate_merge_request_mentions_to_db' - -RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, schema: 20200211155539 do - include MigrationsHelpers - - context 'when migrating data' do - let(:users) { table(:users) } - let(:namespaces) { table(:namespaces) } - let(:projects) { table(:projects) } - let(:notes) { table(:notes) } - let(:routes) { table(:routes) } - - let(:author) { users.create!(email: 'author@example.com', notification_email: 'author@example.com', name: 'author', username: 'author', projects_limit: 10, state: 'active') } - let(:member) { users.create!(email: 'member@example.com', notification_email: 'member@example.com', name: 'member', username: 'member', projects_limit: 10, state: 'active') } - let(:admin) { users.create!(email: 'administrator@example.com', notification_email: 'administrator@example.com', name: 'administrator', username: 'administrator', admin: 1, projects_limit: 10, state: 'active') } - let(:john_doe) { users.create!(email: 'john_doe@example.com', notification_email: 'john_doe@example.com', name: 'john_doe', username: 'john_doe', projects_limit: 10, state: 'active') } - let(:skipped) { users.create!(email: 'skipped@example.com', notification_email: 'skipped@example.com', name: 'skipped', username: 'skipped', projects_limit: 10, state: 'active') } - - let(:mentioned_users) { [author, member, admin, john_doe, skipped] } - let(:mentioned_users_refs) { mentioned_users.map { |u| "@#{u.username}" }.join(' ') } - - let(:group) { namespaces.create!(name: 'test1', path: 'test1', runners_token: 
'my-token1', project_creation_level: 1, visibility_level: 20, type: 'Group') } - let(:inaccessible_group) { namespaces.create!(name: 'test2', path: 'test2', runners_token: 'my-token2', project_creation_level: 1, visibility_level: 0, type: 'Group') } - let(:project) { projects.create!(name: 'gitlab1', path: 'gitlab1', namespace_id: group.id, visibility_level: 0) } - - let(:mentioned_groups) { [group, inaccessible_group] } - let(:group_mentions) { [group, inaccessible_group].map { |gr| "@#{gr.path}" }.join(' ') } - let(:description_mentions) { "description with mentions #{mentioned_users_refs} and #{group_mentions}" } - - before do - # build personal namespaces and routes for users - mentioned_users.each do |u| - namespace = namespaces.create!(path: u.username, name: u.name, runners_token: "my-token-u#{u.id}", owner_id: u.id, type: nil) - routes.create!(path: namespace.path, source_type: 'Namespace', source_id: namespace.id) - end - - # build namespaces and routes for groups - mentioned_groups.each do |gr| - routes.create!(path: gr.path, source_type: 'Namespace', source_id: gr.id) - end - end - - context 'migrate merge request mentions' do - let(:merge_requests) { table(:merge_requests) } - let(:merge_request_user_mentions) { table(:merge_request_user_mentions) } - - let!(:mr1) do - merge_requests.create!( - title: "title 1", state_id: 1, target_branch: 'feature1', source_branch: 'master', - source_project_id: project.id, target_project_id: project.id, author_id: author.id, - description: description_mentions - ) - end - - let!(:mr2) do - merge_requests.create!( - title: "title 2", state_id: 1, target_branch: 'feature2', source_branch: 'master', - source_project_id: project.id, target_project_id: project.id, author_id: author.id, - description: 'some description' - ) - end - - let!(:mr3) do - merge_requests.create!( - title: "title 3", state_id: 1, target_branch: 'feature3', source_branch: 'master', - source_project_id: project.id, target_project_id: project.id, 
author_id: author.id, - description: 'description with an email@example.com and some other @ char here.') - end - - let(:user_mentions) { merge_request_user_mentions } - let(:resource) { merge_request } - - it_behaves_like 'resource mentions migration', MigrateMergeRequestMentionsToDb, 'MergeRequest' - - context 'when FF disabled' do - before do - stub_feature_flags(migrate_user_mentions: false) - end - - it_behaves_like 'resource migration not run', MigrateMergeRequestMentionsToDb, 'MergeRequest' - end - end - - context 'migrate commit mentions' do - let(:repository) { Gitlab::Git::Repository.new('default', TEST_REPO_PATH, '', 'group/project') } - let(:commit) { Commit.new(RepoHelpers.sample_commit, project) } - let(:commit_user_mentions) { table(:commit_user_mentions) } - - let!(:note1) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: description_mentions) } - let!(:note2) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'sample note') } - let!(:note3) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: description_mentions, system: true) } - - # this not does not have actual mentions - let!(:note4) { notes.create!(commit_id: commit.id, noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'note for an email@somesite.com and some other random @ ref' ) } - # this should have pointed to an innexisted commit record in a commits table - # but because commit is not an AR we'll just make it so that it does not have mentions - let!(:note5) { notes.create!(commit_id: 'abc', noteable_type: 'Commit', project_id: project.id, author_id: author.id, note: 'note for an email@somesite.com and some other random @ ref') } - - let(:user_mentions) { commit_user_mentions } - let(:resource) { commit } - - it_behaves_like 'resource notes mentions migration', 
MigrateCommitNotesMentionsToDb, 'Commit' - - context 'when FF disabled' do - before do - stub_feature_flags(migrate_user_mentions: false) - end - - it_behaves_like 'resource notes migration not run', MigrateCommitNotesMentionsToDb, 'Commit' - end - end - end +RSpec.describe Gitlab::BackgroundMigration::UserMentions::CreateResourceUserMention, schema: 20181228175414 do context 'checks no_quote_columns' do it 'has correct no_quote_columns' do expect(Gitlab::BackgroundMigration::UserMentions::Models::MergeRequest.no_quote_columns).to match([:note_id, :merge_request_id]) diff --git a/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb b/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb index 07f4429f7d9..5c197526a55 100644 --- a/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb +++ b/spec/lib/gitlab/background_migration/wrongfully_confirmed_email_unconfirmer_spec.rb @@ -2,7 +2,7 @@ require 'spec_helper' -RSpec.describe Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer, schema: 20200615111857 do +RSpec.describe Gitlab::BackgroundMigration::WrongfullyConfirmedEmailUnconfirmer, schema: 20181228175414 do let(:users) { table(:users) } let(:emails) { table(:emails) } let(:user_synced_attributes_metadata) { table(:user_synced_attributes_metadata) } diff --git a/spec/lib/gitlab/background_migration_spec.rb b/spec/lib/gitlab/background_migration_spec.rb index f32e6891716..777dc8112a7 100644 --- a/spec/lib/gitlab/background_migration_spec.rb +++ b/spec/lib/gitlab/background_migration_spec.rb @@ -3,6 +3,14 @@ require 'spec_helper' RSpec.describe Gitlab::BackgroundMigration do + let(:coordinator) { described_class::JobCoordinator.for_database(:main) } + + before do + allow(described_class).to receive(:coordinator_for_database) + .with(:main) + .and_return(coordinator) + end + describe '.queue' do it 'returns background migration worker queue' do 
expect(described_class.queue) @@ -11,7 +19,7 @@ RSpec.describe Gitlab::BackgroundMigration do end describe '.steal' do - context 'when there are enqueued jobs present' do + context 'when the queue contains unprocessed jobs' do let(:queue) do [ double(args: ['Foo', [10, 20]], klass: 'BackgroundMigrationWorker'), @@ -22,110 +30,34 @@ RSpec.describe Gitlab::BackgroundMigration do before do allow(Sidekiq::Queue).to receive(:new) - .with(described_class.queue) + .with(coordinator.queue) .and_return(queue) end - context 'when queue contains unprocessed jobs' do - it 'steals jobs from a queue' do - expect(queue[0]).to receive(:delete).and_return(true) - - expect(described_class).to receive(:perform) - .with('Foo', [10, 20]) - - described_class.steal('Foo') - end - - it 'does not steal job that has already been taken' do - expect(queue[0]).to receive(:delete).and_return(false) - - expect(described_class).not_to receive(:perform) - - described_class.steal('Foo') - end - - it 'does not steal jobs for a different migration' do - expect(described_class).not_to receive(:perform) + it 'uses the coordinator to steal jobs' do + expect(queue[0]).to receive(:delete).and_return(true) - expect(queue[0]).not_to receive(:delete) - - described_class.steal('Baz') - end - - context 'when a custom predicate is given' do - it 'steals jobs that match the predicate' do - expect(queue[0]).to receive(:delete).and_return(true) - - expect(described_class).to receive(:perform) - .with('Foo', [10, 20]) - - described_class.steal('Foo') { |job| job.args.second.first == 10 && job.args.second.second == 20 } - end + expect(coordinator).to receive(:steal).with('Foo', retry_dead_jobs: false).and_call_original + expect(coordinator).to receive(:perform).with('Foo', [10, 20]) - it 'does not steal jobs that do not match the predicate' do - expect(described_class).not_to receive(:perform) - - expect(queue[0]).not_to receive(:delete) - - described_class.steal('Foo') { |(arg1, _)| arg1 == 5 } - end - end + 
described_class.steal('Foo') end - context 'when one of the jobs raises an error' do - let(:migration) { spy(:migration) } - - let(:queue) do - [double(args: ['Foo', [10, 20]], klass: 'BackgroundMigrationWorker'), - double(args: ['Foo', [20, 30]], klass: 'BackgroundMigrationWorker')] - end - - before do - stub_const("#{described_class}::Foo", migration) - - allow(queue[0]).to receive(:delete).and_return(true) - allow(queue[1]).to receive(:delete).and_return(true) - end - - it 'enqueues the migration again and re-raises the error' do - allow(migration).to receive(:perform).with(10, 20) - .and_raise(Exception, 'Migration error').once + context 'when a custom predicate is given' do + it 'steals jobs that match the predicate' do + expect(queue[0]).to receive(:delete).and_return(true) - expect(BackgroundMigrationWorker).to receive(:perform_async) - .with('Foo', [10, 20]).once + expect(coordinator).to receive(:perform).with('Foo', [10, 20]) - expect { described_class.steal('Foo') }.to raise_error(Exception) + described_class.steal('Foo') { |job| job.args.second.first == 10 && job.args.second.second == 20 } end - end - end - context 'when there are scheduled jobs present', :redis do - it 'steals all jobs from the scheduled sets' do - Sidekiq::Testing.disable! do - BackgroundMigrationWorker.perform_in(10.minutes, 'Object') - - expect(Sidekiq::ScheduledSet.new).to be_one - expect(described_class).to receive(:perform).with('Object', any_args) - - described_class.steal('Object') + it 'does not steal jobs that do not match the predicate' do + expect(coordinator).not_to receive(:perform) - expect(Sidekiq::ScheduledSet.new).to be_none - end - end - end - - context 'when there are enqueued and scheduled jobs present', :redis do - it 'steals from the scheduled sets queue first' do - Sidekiq::Testing.disable! 
do - expect(described_class).to receive(:perform) - .with('Object', [1]).ordered - expect(described_class).to receive(:perform) - .with('Object', [2]).ordered - - BackgroundMigrationWorker.perform_async('Object', [2]) - BackgroundMigrationWorker.perform_in(10.minutes, 'Object', [1]) + expect(queue[0]).not_to receive(:delete) - described_class.steal('Object') + described_class.steal('Foo') { |(arg1, _)| arg1 == 5 } end end end @@ -146,14 +78,10 @@ RSpec.describe Gitlab::BackgroundMigration do it 'steals from the dead and retry queue' do Sidekiq::Testing.disable! do - expect(described_class).to receive(:perform) - .with('Object', [1]).ordered - expect(described_class).to receive(:perform) - .with('Object', [2]).ordered - expect(described_class).to receive(:perform) - .with('Object', [3]).ordered - expect(described_class).to receive(:perform) - .with('Object', [4]).ordered + expect(coordinator).to receive(:perform).with('Object', [1]).ordered + expect(coordinator).to receive(:perform).with('Object', [2]).ordered + expect(coordinator).to receive(:perform).with('Object', [3]).ordered + expect(coordinator).to receive(:perform).with('Object', [4]).ordered BackgroundMigrationWorker.perform_async('Object', [2]) BackgroundMigrationWorker.perform_in(10.minutes, 'Object', [1]) @@ -171,131 +99,54 @@ RSpec.describe Gitlab::BackgroundMigration do stub_const("#{described_class.name}::Foo", migration) end - it 'performs a background migration' do + it 'uses the coordinator to perform a background migration' do + expect(coordinator).to receive(:perform).with('Foo', [10, 20]).and_call_original expect(migration).to receive(:perform).with(10, 20).once described_class.perform('Foo', [10, 20]) end + end - context 'backward compatibility' do - it 'performs a background migration for fully-qualified job classes' do - expect(migration).to receive(:perform).with(10, 20).once - expect(Gitlab::ErrorTracking) - .to receive(:track_and_raise_for_dev_exception) - .with(instance_of(StandardError), 
hash_including(:class_name)) - - described_class.perform('Gitlab::BackgroundMigration::Foo', [10, 20]) + describe '.exists?', :redis do + before do + Sidekiq::Testing.disable! do + MergeWorker.perform_async('Bar') + BackgroundMigrationWorker.perform_async('Foo') end end - end - describe '.remaining', :redis do - context 'when there are jobs remaining' do - before do - Sidekiq::Testing.disable! do - MergeWorker.perform_async('Foo') - MergeWorker.perform_in(10.minutes, 'Foo') - - 5.times do - BackgroundMigrationWorker.perform_async('Foo') - end - 3.times do - BackgroundMigrationWorker.perform_in(10.minutes, 'Foo') - end - end - end + it 'uses the coordinator to find if a job exists' do + expect(coordinator).to receive(:exists?).with('Foo', []).and_call_original - it 'returns the enqueued jobs plus the scheduled jobs' do - expect(described_class.remaining).to eq(8) - end + expect(described_class.exists?('Foo')).to eq(true) end - context 'when there are no jobs remaining' do - it 'returns zero' do - expect(described_class.remaining).to be_zero - end + it 'uses the coordinator to find a job does not exist' do + expect(coordinator).to receive(:exists?).with('Bar', []).and_call_original + + expect(described_class.exists?('Bar')).to eq(false) end end - describe '.exists?', :redis do - context 'when there are enqueued jobs present' do - before do - Sidekiq::Testing.disable! do - MergeWorker.perform_async('Bar') + describe '.remaining', :redis do + before do + Sidekiq::Testing.disable! do + MergeWorker.perform_async('Foo') + MergeWorker.perform_in(10.minutes, 'Foo') + + 5.times do BackgroundMigrationWorker.perform_async('Foo') end - end - - it 'returns true if specific job exists' do - expect(described_class.exists?('Foo')).to eq(true) - end - - it 'returns false if specific job does not exist' do - expect(described_class.exists?('Bar')).to eq(false) - end - end - - context 'when there are scheduled jobs present' do - before do - Sidekiq::Testing.disable! 
do - MergeWorker.perform_in(10.minutes, 'Bar') + 3.times do BackgroundMigrationWorker.perform_in(10.minutes, 'Foo') end end - - it 'returns true if specific job exists' do - expect(described_class.exists?('Foo')).to eq(true) - end - - it 'returns false if specific job does not exist' do - expect(described_class.exists?('Bar')).to eq(false) - end - end - end - - describe '.dead_jobs?' do - let(:queue) do - [ - double(args: ['Foo', [10, 20]], klass: 'BackgroundMigrationWorker'), - double(args: ['Bar'], klass: 'MergeWorker') - ] end - context 'when there are dead jobs present' do - before do - allow(Sidekiq::DeadSet).to receive(:new).and_return(queue) - end - - it 'returns true if specific job exists' do - expect(described_class.dead_jobs?('Foo')).to eq(true) - end + it 'uses the coordinator to find the number of remaining jobs' do + expect(coordinator).to receive(:remaining).and_call_original - it 'returns false if specific job does not exist' do - expect(described_class.dead_jobs?('Bar')).to eq(false) - end - end - end - - describe '.retrying_jobs?' 
do - let(:queue) do - [ - double(args: ['Foo', [10, 20]], klass: 'BackgroundMigrationWorker'), - double(args: ['Bar'], klass: 'MergeWorker') - ] - end - - context 'when there are dead jobs present' do - before do - allow(Sidekiq::RetrySet).to receive(:new).and_return(queue) - end - - it 'returns true if specific job exists' do - expect(described_class.retrying_jobs?('Foo')).to eq(true) - end - - it 'returns false if specific job does not exist' do - expect(described_class.retrying_jobs?('Bar')).to eq(false) - end + expect(described_class.remaining).to eq(8) end end end diff --git a/spec/lib/gitlab/bare_repository_import/importer_spec.rb b/spec/lib/gitlab/bare_repository_import/importer_spec.rb index e09430a858c..b0d721a74ce 100644 --- a/spec/lib/gitlab/bare_repository_import/importer_spec.rb +++ b/spec/lib/gitlab/bare_repository_import/importer_spec.rb @@ -89,10 +89,8 @@ RSpec.describe Gitlab::BareRepositoryImport::Importer, :seed_helper do project = Project.find_by_full_path(project_path) repo_path = "#{project.disk_path}.git" - hook_path = File.join(repo_path, 'hooks') expect(gitlab_shell.repository_exists?(project.repository_storage, repo_path)).to be(true) - expect(TestEnv.storage_dir_exists?(project.repository_storage, hook_path)).to be(true) end context 'hashed storage enabled' do diff --git a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb index 4e4d921d67f..f9313f0ff28 100644 --- a/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb +++ b/spec/lib/gitlab/bitbucket_server_import/importer_spec.rb @@ -142,7 +142,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do expect { subject.execute }.to change { MergeRequest.count }.by(1) merge_request = MergeRequest.first - expect(merge_request.author).to eq(pull_request_author) + expect(merge_request.author).to eq(expected_author) end end @@ -151,7 +151,25 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do 
stub_feature_flags(bitbucket_server_user_mapping_by_username: false) end - include_examples 'imports pull requests' + context 'when email is not present' do + before do + allow(pull_request).to receive(:author_email).and_return(nil) + end + + let(:expected_author) { project_creator } + + include_examples 'imports pull requests' + end + + context 'when email is present' do + before do + allow(pull_request).to receive(:author_email).and_return(pull_request_author.email) + end + + let(:expected_author) { pull_request_author } + + include_examples 'imports pull requests' + end end context 'when bitbucket_server_user_mapping_by_username feature flag is enabled' do @@ -159,19 +177,24 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do stub_feature_flags(bitbucket_server_user_mapping_by_username: true) end - include_examples 'imports pull requests' do - context 'when username is not present' do - before do - allow(pull_request).to receive(:author_username).and_return(nil) - end + context 'when username is not present' do + before do + allow(pull_request).to receive(:author_username).and_return(nil) + end - it 'maps by email' do - expect { subject.execute }.to change { MergeRequest.count }.by(1) + let(:expected_author) { project_creator } - merge_request = MergeRequest.first - expect(merge_request.author).to eq(pull_request_author) - end + include_examples 'imports pull requests' + end + + context 'when username is present' do + before do + allow(pull_request).to receive(:author_username).and_return(pull_request_author.username) end + + let(:expected_author) { pull_request_author } + + include_examples 'imports pull requests' end end @@ -228,7 +251,23 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do allow(subject.client).to receive(:activities).and_return([pr_comment]) end - it 'maps by email' do + it 'defaults to import user' do + expect { subject.execute }.to change { MergeRequest.count }.by(1) + + merge_request = MergeRequest.first + 
expect(merge_request.notes.count).to eq(1) + note = merge_request.notes.first + expect(note.author).to eq(project_creator) + end + end + + context 'when username is present' do + before do + allow(pr_note).to receive(:author_username).and_return(note_author.username) + allow(subject.client).to receive(:activities).and_return([pr_comment]) + end + + it 'maps by username' do expect { subject.execute }.to change { MergeRequest.count }.by(1) merge_request = MergeRequest.first @@ -241,7 +280,7 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do end context 'metrics' do - let(:histogram) { double(:histogram) } + let(:histogram) { double(:histogram).as_null_object } let(:counter) { double('counter', increment: true) } before do @@ -276,7 +315,6 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do ) expect(counter).to receive(:increment) - allow(histogram).to receive(:observe).with({ importer: :bitbucket_server_importer }, anything) subject.execute end @@ -384,13 +422,13 @@ RSpec.describe Gitlab::BitbucketServerImport::Importer do allow(inline_note).to receive(:author_username).and_return(nil) end - it 'maps by email' do + it 'defaults to import user' do expect { subject.execute }.to change { MergeRequest.count }.by(1) notes = MergeRequest.first.notes.order(:id).to_a - expect(notes.first.author).to eq(inline_note_author) - expect(notes.last.author).to eq(reply_author) + expect(notes.first.author).to eq(project_creator) + expect(notes.last.author).to eq(project_creator) end end end diff --git a/spec/lib/gitlab/blob_helper_spec.rb b/spec/lib/gitlab/blob_helper_spec.rb index 65fa5bf0120..a2f20dcd4fc 100644 --- a/spec/lib/gitlab/blob_helper_spec.rb +++ b/spec/lib/gitlab/blob_helper_spec.rb @@ -7,6 +7,7 @@ RSpec.describe Gitlab::BlobHelper do let(:project) { create(:project) } let(:blob) { fake_blob(path: 'file.txt') } + let(:webp_blob) { fake_blob(path: 'file.webp') } let(:large_blob) { fake_blob(path: 'test.pdf', size: 2.megabytes, binary: true) } describe 
'#extname' do @@ -62,8 +63,15 @@ RSpec.describe Gitlab::BlobHelper do end describe '#image?' do - it 'returns false' do - expect(blob.image?).to be_falsey + context 'with a .txt file' do + it 'returns false' do + expect(blob.image?).to be_falsey + end + end + context 'with a .webp file' do + it 'returns true' do + expect(webp_blob.image?).to be_truthy + end end end diff --git a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb index 83a37655ea9..e982f0eb015 100644 --- a/spec/lib/gitlab/ci/artifact_file_reader_spec.rb +++ b/spec/lib/gitlab/ci/artifact_file_reader_spec.rb @@ -18,17 +18,6 @@ RSpec.describe Gitlab::Ci::ArtifactFileReader do expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom') end - context 'when FF ci_new_artifact_file_reader is disabled' do - before do - stub_feature_flags(ci_new_artifact_file_reader: false) - end - - it 'returns the content at the path' do - is_expected.to be_present - expect(YAML.safe_load(subject).keys).to contain_exactly('rspec', 'time', 'custom') - end - end - context 'when path does not exist' do let(:path) { 'file/does/not/exist.txt' } let(:expected_error) do diff --git a/spec/lib/gitlab/ci/artifacts/metrics_spec.rb b/spec/lib/gitlab/ci/artifacts/metrics_spec.rb index 3a2095498ec..0ce76285b03 100644 --- a/spec/lib/gitlab/ci/artifacts/metrics_spec.rb +++ b/spec/lib/gitlab/ci/artifacts/metrics_spec.rb @@ -10,9 +10,9 @@ RSpec.describe Gitlab::Ci::Artifacts::Metrics, :prometheus do let(:counter) { metrics.send(:destroyed_artifacts_counter) } it 'increments a single counter' do - subject.increment_destroyed_artifacts(10) - subject.increment_destroyed_artifacts(20) - subject.increment_destroyed_artifacts(30) + subject.increment_destroyed_artifacts_count(10) + subject.increment_destroyed_artifacts_count(20) + subject.increment_destroyed_artifacts_count(30) expect(counter.get).to eq 60 expect(counter.values.count).to eq 1 diff --git 
a/spec/lib/gitlab/ci/build/auto_retry_spec.rb b/spec/lib/gitlab/ci/build/auto_retry_spec.rb index fc5999d59ac..9ff9200322e 100644 --- a/spec/lib/gitlab/ci/build/auto_retry_spec.rb +++ b/spec/lib/gitlab/ci/build/auto_retry_spec.rb @@ -25,6 +25,8 @@ RSpec.describe Gitlab::Ci::Build::AutoRetry do "quota is exceeded" | 0 | { max: 2 } | :ci_quota_exceeded | false "no matching runner" | 0 | { max: 2 } | :no_matching_runner | false "missing dependencies" | 0 | { max: 2 } | :missing_dependency_failure | false + "forward deployment failure" | 0 | { max: 2 } | :forward_deployment_failure | false + "environment creation failure" | 0 | { max: 2 } | :environment_creation_failure | false end with_them do diff --git a/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb b/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb index 86dd5569a96..f192862c1c4 100644 --- a/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb +++ b/spec/lib/gitlab/ci/build/rules/rule/clause/exists_spec.rb @@ -3,10 +3,8 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists do - describe '#satisfied_by?' do - let(:pipeline) { build(:ci_pipeline, project: project, sha: project.repository.head_commit.sha) } - - subject { described_class.new(globs).satisfied_by?(pipeline, nil) } + shared_examples 'an exists rule with a context' do + subject { described_class.new(globs).satisfied_by?(pipeline, context) } it_behaves_like 'a glob matching rule' do let(:project) { create(:project, :custom_repo, files: files) } @@ -24,4 +22,26 @@ RSpec.describe Gitlab::Ci::Build::Rules::Rule::Clause::Exists do it { is_expected.to be_truthy } end end + + describe '#satisfied_by?' 
do + let(:pipeline) { build(:ci_pipeline, project: project, sha: project.repository.head_commit.sha) } + + context 'when context is Build::Context::Build' do + it_behaves_like 'an exists rule with a context' do + let(:context) { Gitlab::Ci::Build::Context::Build.new(pipeline, sha: 'abc1234') } + end + end + + context 'when context is Build::Context::Global' do + it_behaves_like 'an exists rule with a context' do + let(:context) { Gitlab::Ci::Build::Context::Global.new(pipeline, yaml_variables: {}) } + end + end + + context 'when context is Config::External::Context' do + it_behaves_like 'an exists rule with a context' do + let(:context) { Gitlab::Ci::Config::External::Context.new(project: project, sha: project.repository.tree.sha) } + end + end + end end diff --git a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb index b99048e2c18..0505b17ea91 100644 --- a/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/include/rules/rule_spec.rb @@ -5,7 +5,7 @@ require 'fast_spec_helper' RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do let(:factory) do Gitlab::Config::Entry::Factory.new(described_class) - .value(config) + .value(config) end subject(:entry) { factory.create! 
} @@ -25,6 +25,12 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do it { is_expected.to be_valid } end + context 'when specifying an exists: clause' do + let(:config) { { exists: './this.md' } } + + it { is_expected.to be_valid } + end + context 'using a list of multiple expressions' do let(:config) { { if: ['$MY_VAR == "this"', '$YOUR_VAR == "that"'] } } @@ -86,5 +92,13 @@ RSpec.describe Gitlab::Ci::Config::Entry::Include::Rules::Rule do expect(subject).to eq(if: '$THIS || $THAT') end end + + context 'when specifying an exists: clause' do + let(:config) { { exists: './test.md' } } + + it 'returns the config' do + expect(subject).to eq(exists: './test.md') + end + end end end diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb index b872f6644a2..c9c28e2eb8b 100644 --- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb +++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb @@ -33,6 +33,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do end end + context 'when job name is more than 255' do + let(:entry) { node_class.new(config, name: ('a' * 256).to_sym) } + + it 'shows a validation error' do + expect(entry.errors).to include "job name is too long (maximum is 255 characters)" + end + end + context 'when job name is empty' do let(:entry) { node_class.new(config, name: ''.to_sym) } diff --git a/spec/lib/gitlab/ci/config/extendable_spec.rb b/spec/lib/gitlab/ci/config/extendable_spec.rb index 481f55d790e..2fc009569fc 100644 --- a/spec/lib/gitlab/ci/config/extendable_spec.rb +++ b/spec/lib/gitlab/ci/config/extendable_spec.rb @@ -73,6 +73,50 @@ RSpec.describe Gitlab::Ci::Config::Extendable do end end + context 'when the job tries to delete an extension key' do + let(:hash) do + { + something: { + script: 'deploy', + only: { variables: %w[$SOMETHING] } + }, + + test1: { + extends: 'something', + script: 'ls', + only: {} + }, + + test2: { + extends: 'something', + script: 
'ls', + only: nil + } + } + end + + it 'deletes the key if assigned to null' do + expect(subject.to_hash).to eq( + something: { + script: 'deploy', + only: { variables: %w[$SOMETHING] } + }, + test1: { + extends: 'something', + script: 'ls', + only: { + variables: %w[$SOMETHING] + } + }, + test2: { + extends: 'something', + script: 'ls', + only: nil + } + ) + end + end + context 'when a hash uses recursive extensions' do let(:hash) do { diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb index c2f28253f54..2e9e6f95071 100644 --- a/spec/lib/gitlab/ci/config/external/processor_spec.rb +++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb @@ -406,7 +406,7 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do context 'when rules defined' do context 'when a rule is invalid' do let(:values) do - { include: [{ local: 'builds.yml', rules: [{ exists: ['$MY_VAR'] }] }] } + { include: [{ local: 'builds.yml', rules: [{ changes: ['$MY_VAR'] }] }] } end it 'raises IncludeError' do diff --git a/spec/lib/gitlab/ci/config/external/rules_spec.rb b/spec/lib/gitlab/ci/config/external/rules_spec.rb index 9a5c29befa2..1e42cb30ae7 100644 --- a/spec/lib/gitlab/ci/config/external/rules_spec.rb +++ b/spec/lib/gitlab/ci/config/external/rules_spec.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -require 'fast_spec_helper' +require 'spec_helper' RSpec.describe Gitlab::Ci::Config::External::Rules do let(:rule_hashes) {} @@ -32,6 +32,26 @@ RSpec.describe Gitlab::Ci::Config::External::Rules do end end + context 'when there is a rule with exists' do + let(:project) { create(:project, :repository) } + let(:context) { double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['test.md']) } + let(:rule_hashes) { [{ exists: 'Dockerfile' }] } + + context 'when the file does not exist' do + it { is_expected.to eq(false) } + end + + context 'when the file exists' do + let(:context) { 
double(project: project, sha: project.repository.tree.sha, top_level_worktree_paths: ['Dockerfile']) } + + before do + project.repository.create_file(project.owner, 'Dockerfile', "commit", message: 'test', branch_name: "master") + end + + it { is_expected.to eq(true) } + end + end + context 'when there is a rule with if and when' do let(:rule_hashes) { [{ if: '$MY_VAR == "hello"', when: 'on_success' }] } @@ -41,12 +61,12 @@ RSpec.describe Gitlab::Ci::Config::External::Rules do end end - context 'when there is a rule with exists' do - let(:rule_hashes) { [{ exists: ['$MY_VAR'] }] } + context 'when there is a rule with changes' do + let(:rule_hashes) { [{ changes: ['$MY_VAR'] }] } it 'raises an error' do expect { result }.to raise_error(described_class::InvalidIncludeRulesError, - 'invalid include rule: {:exists=>["$MY_VAR"]}') + 'invalid include rule: {:changes=>["$MY_VAR"]}') end end end diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb index 3ec4519748f..1b3e8a2ce4a 100644 --- a/spec/lib/gitlab/ci/config_spec.rb +++ b/spec/lib/gitlab/ci/config_spec.rb @@ -14,7 +14,7 @@ RSpec.describe Gitlab::Ci::Config do end let(:config) do - described_class.new(yml, project: nil, sha: nil, user: nil) + described_class.new(yml, project: nil, pipeline: nil, sha: nil, user: nil) end context 'when config is valid' do @@ -286,9 +286,12 @@ RSpec.describe Gitlab::Ci::Config do end context "when using 'include' directive" do - let(:group) { create(:group) } + let_it_be(:group) { create(:group) } + let(:project) { create(:project, :repository, group: group) } let(:main_project) { create(:project, :repository, :public, group: group) } + let(:pipeline) { build(:ci_pipeline, project: project) } + let(:remote_location) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' } let(:local_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' } @@ -327,7 +330,7 @@ RSpec.describe Gitlab::Ci::Config do end let(:config) do 
- described_class.new(gitlab_ci_yml, project: project, sha: '12345', user: user) + described_class.new(gitlab_ci_yml, project: project, pipeline: pipeline, sha: '12345', user: user) end before do @@ -594,7 +597,7 @@ RSpec.describe Gitlab::Ci::Config do job1: { script: ["echo 'hello from main file'"], variables: { - VARIABLE_DEFINED_IN_MAIN_FILE: 'some value' + VARIABLE_DEFINED_IN_MAIN_FILE: 'some value' } } }) @@ -725,26 +728,91 @@ RSpec.describe Gitlab::Ci::Config do end context "when an 'include' has rules" do + context "when the rule is an if" do + let(:gitlab_ci_yml) do + <<~HEREDOC + include: + - local: #{local_location} + rules: + - if: $CI_PROJECT_ID == "#{project_id}" + image: ruby:2.7 + HEREDOC + end + + context 'when the rules condition is satisfied' do + let(:project_id) { project.id } + + it 'includes the file' do + expect(config.to_hash).to include(local_location_hash) + end + end + + context 'when the rules condition is not satisfied' do + let(:project_id) { non_existing_record_id } + + it 'does not include the file' do + expect(config.to_hash).not_to include(local_location_hash) + end + end + end + + context "when the rule is an exists" do + let(:gitlab_ci_yml) do + <<~HEREDOC + include: + - local: #{local_location} + rules: + - exists: "#{filename}" + image: ruby:2.7 + HEREDOC + end + + before do + project.repository.create_file( + project.creator, + 'my_builds.yml', + local_file_content, + message: 'Add my_builds.yml', + branch_name: '12345' + ) + end + + context 'when the exists file does not exist' do + let(:filename) { 'not_a_real_file.md' } + + it 'does not include the file' do + expect(config.to_hash).not_to include(local_location_hash) + end + end + + context 'when the exists file does exist' do + let(:filename) { 'my_builds.yml' } + + it 'does include the file' do + expect(config.to_hash).to include(local_location_hash) + end + end + end + end + + context "when an 'include' has rules with a pipeline variable" do + let(:gitlab_ci_yml) do + <<~HEREDOC 
include: - local: #{local_location} rules: - - if: $CI_PROJECT_ID == "#{project_id}" - image: ruby:2.7 + - if: $CI_COMMIT_SHA == "#{project.commit.sha}" HEREDOC end - context 'when the rules condition is satisfied' do - let(:project_id) { project.id } - + context 'when a pipeline is passed' do it 'includes the file' do expect(config.to_hash).to include(local_location_hash) end end - context 'when the rules condition is satisfied' do - let(:project_id) { non_existing_record_id } + context 'when a pipeline is not passed' do + let(:pipeline) { nil } it 'does not include the file' do expect(config.to_hash).not_to include(local_location_hash) diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb index 16517b39a45..cf21c98dbd5 100644 --- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb @@ -83,7 +83,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do end end - it 'respects the defined payload schema' do + it 'respects the defined payload schema', :saas do expect(::Gitlab::HTTP).to receive(:post) do |_url, params| expect(params[:body]).to match_schema('/external_validation') expect(params[:timeout]).to eq(described_class::DEFAULT_VALIDATION_REQUEST_TIMEOUT) diff --git a/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb index c52994fc6a2..5b0917c5c6f 100644 --- a/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb @@ -3,9 +3,9 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Pipeline::Quota::Deployments do - let_it_be(:namespace) { create(:namespace) } - let_it_be(:default_plan, reload: true) { create(:default_plan) } - let_it_be(:project, reload: true) { create(:project, :repository, namespace: namespace) } + let_it_be_with_refind(:namespace) { create(:namespace) } + 
let_it_be_with_reload(:default_plan) { create(:default_plan) } + let_it_be_with_reload(:project) { create(:project, :repository, namespace: namespace) } let_it_be(:plan_limits) { create(:plan_limits, plan: default_plan) } let(:pipeline) { build_stubbed(:ci_pipeline, project: project) } diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb index 3aa6b2e3c05..e2b64e65938 100644 --- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb +++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do - let_it_be(:project) { create(:project, :repository) } + let_it_be_with_reload(:project) { create(:project, :repository) } let_it_be(:head_sha) { project.repository.head_commit.id } let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) } @@ -13,7 +13,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do let(:previous_stages) { [] } let(:current_stage) { double(seeds_names: [attributes[:name]]) } - let(:seed_build) { described_class.new(seed_context, attributes, previous_stages, current_stage) } + let(:seed_build) { described_class.new(seed_context, attributes, previous_stages + [current_stage]) } describe '#attributes' do subject { seed_build.attributes } @@ -393,12 +393,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do describe '#to_resource' do subject { seed_build.to_resource } - context 'when job is not a bridge' do + context 'when job is Ci::Build' do it { is_expected.to be_a(::Ci::Build) } it { is_expected.to be_valid } shared_examples_for 'deployment job' do it 'returns a job with deployment' do + expect { subject }.to change { Environment.count }.by(1) + expect(subject.deployment).not_to be_nil expect(subject.deployment.deployable).to eq(subject) expect(subject.deployment.environment.name).to eq(expected_environment_name) @@ -413,6 +415,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do 
shared_examples_for 'ensures environment existence' do it 'has environment' do + expect { subject }.to change { Environment.count }.by(1) + expect(subject).to be_has_environment expect(subject.environment).to eq(environment_name) expect(subject.metadata.expanded_environment_name).to eq(expected_environment_name) @@ -422,6 +426,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do shared_examples_for 'ensures environment inexistence' do it 'does not have environment' do + expect { subject }.not_to change { Environment.count } + expect(subject).not_to be_has_environment expect(subject.environment).to be_nil expect(subject.metadata&.expanded_environment_name).to be_nil @@ -1212,14 +1218,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do ] end - context 'when FF :variable_inside_variable is enabled' do - before do - stub_feature_flags(variable_inside_variable: [project]) - end - - it "does not have errors" do - expect(subject.errors).to be_empty - end + it "does not have errors" do + expect(subject.errors).to be_empty end end @@ -1232,36 +1232,20 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do ] end - context 'when FF :variable_inside_variable is disabled' do - before do - stub_feature_flags(variable_inside_variable: false) - end - - it "does not have errors" do - expect(subject.errors).to be_empty - end + it "returns an error" do + expect(subject.errors).to contain_exactly( + 'rspec: circular variable reference detected: ["A", "B", "C"]') end - context 'when FF :variable_inside_variable is enabled' do - before do - stub_feature_flags(variable_inside_variable: [project]) - end + context 'with job:rules:[if:]' do + let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$C != null', when: 'always' }] } } - it "returns an error" do - expect(subject.errors).to contain_exactly( - 'rspec: circular variable reference detected: ["A", "B", "C"]') + it "included? does not raise" do + expect { subject.included? 
}.not_to raise_error end - context 'with job:rules:[if:]' do - let(:attributes) { { name: 'rspec', ref: 'master', rules: [{ if: '$C != null', when: 'always' }] } } - - it "included? does not raise" do - expect { subject.included? }.not_to raise_error - end - - it "included? returns true" do - expect(subject.included?).to eq(true) - end + it "included? returns true" do + expect(subject.included?).to eq(true) end end end diff --git a/spec/lib/gitlab/ci/reports/security/report_spec.rb b/spec/lib/gitlab/ci/reports/security/report_spec.rb index 5a85c3f19fc..a8b962ee970 100644 --- a/spec/lib/gitlab/ci/reports/security/report_spec.rb +++ b/spec/lib/gitlab/ci/reports/security/report_spec.rb @@ -221,4 +221,26 @@ RSpec.describe Gitlab::Ci::Reports::Security::Report do end end end + + describe '#has_signatures?' do + let(:finding) { create(:ci_reports_security_finding, signatures: signatures) } + + subject { report.has_signatures? } + + before do + report.add_finding(finding) + end + + context 'when the findings of the report does not have signatures' do + let(:signatures) { [] } + + it { is_expected.to be_falsey } + end + + context 'when the findings of the report have signatures' do + let(:signatures) { [instance_double(Gitlab::Ci::Reports::Security::FindingSignature)] } + + it { is_expected.to be_truthy } + end + end end diff --git a/spec/lib/gitlab/ci/reports/security/reports_spec.rb b/spec/lib/gitlab/ci/reports/security/reports_spec.rb index 9b1e02f1418..79eee642552 100644 --- a/spec/lib/gitlab/ci/reports/security/reports_spec.rb +++ b/spec/lib/gitlab/ci/reports/security/reports_spec.rb @@ -54,11 +54,12 @@ RSpec.describe Gitlab::Ci::Reports::Security::Reports do end describe "#violates_default_policy_against?" 
do - let(:high_severity_dast) { build(:ci_reports_security_finding, severity: 'high', report_type: :dast) } + let(:high_severity_dast) { build(:ci_reports_security_finding, severity: 'high', report_type: 'dast') } let(:vulnerabilities_allowed) { 0 } let(:severity_levels) { %w(critical high) } + let(:vulnerability_states) { %w(newly_detected)} - subject { security_reports.violates_default_policy_against?(target_reports, vulnerabilities_allowed, severity_levels) } + subject { security_reports.violates_default_policy_against?(target_reports, vulnerabilities_allowed, severity_levels, vulnerability_states) } before do security_reports.get_report('sast', artifact).add_finding(high_severity_dast) @@ -108,6 +109,22 @@ RSpec.describe Gitlab::Ci::Reports::Security::Reports do it { is_expected.to be(false) } end + + context 'with related report_types' do + let(:report_types) { %w(dast sast) } + + subject { security_reports.violates_default_policy_against?(target_reports, vulnerabilities_allowed, severity_levels, vulnerability_states, report_types) } + + it { is_expected.to be(true) } + end + + context 'with unrelated report_types' do + let(:report_types) { %w(dependency_scanning sast) } + + subject { security_reports.violates_default_policy_against?(target_reports, vulnerabilities_allowed, severity_levels, vulnerability_states, report_types) } + + it { is_expected.to be(false) } + end end end end diff --git a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb index d377cf0c735..789f694b4b4 100644 --- a/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb +++ b/spec/lib/gitlab/ci/templates/Jobs/deploy_gitlab_ci_yaml_spec.rb @@ -27,9 +27,9 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do end describe 'the created pipeline' do - let(:project) { create(:project, :repository) } - let(:user) { project.owner } + let_it_be(:project, refind: true) { create(:project, :repository) } + let(:user) { 
project.owner } let(:default_branch) { 'master' } let(:pipeline_ref) { default_branch } let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) } @@ -43,23 +43,23 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do allow(project).to receive(:default_branch).and_return(default_branch) end - context 'with no cluster' do + context 'with no cluster or agent' do it 'does not create any kubernetes deployment jobs' do expect(build_names).to eq %w(placeholder) end end context 'with only a disabled cluster' do - let!(:cluster) { create(:cluster, :project, :provided_by_gcp, enabled: false, projects: [project]) } + before do + create(:cluster, :project, :provided_by_gcp, enabled: false, projects: [project]) + end it 'does not create any kubernetes deployment jobs' do expect(build_names).to eq %w(placeholder) end end - context 'with an active cluster' do - let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) } - + shared_examples_for 'pipeline with deployment jobs' do context 'on master' do it 'by default' do expect(build_names).to include('production') @@ -218,5 +218,21 @@ RSpec.describe 'Jobs/Deploy.gitlab-ci.yml' do end end end + + context 'with an agent' do + before do + create(:cluster_agent, project: project) + end + + it_behaves_like 'pipeline with deployment jobs' + end + + context 'with a cluster' do + before do + create(:cluster, :project, :provided_by_gcp, projects: [project]) + end + + it_behaves_like 'pipeline with deployment jobs' + end end end diff --git a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb new file mode 100644 index 00000000000..b9256ece78b --- /dev/null +++ b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb @@ -0,0 +1,65 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Jobs/SAST-IaC.latest.gitlab-ci.yml' do + subject(:template) { 
Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/SAST-IaC.latest') } + + describe 'the created pipeline' do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:user) { project.owner } + + let(:default_branch) { 'main' } + let(:pipeline_ref) { default_branch } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_ref) } + let(:pipeline) { service.execute!(:push).payload } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template.content) + allow_next_instance_of(Ci::BuildScheduleWorker) do |instance| + allow(instance).to receive(:perform).and_return(true) + end + allow(project).to receive(:default_branch).and_return(default_branch) + end + + context 'on feature branch' do + let(:pipeline_ref) { 'feature' } + + it 'creates the kics-iac-sast job' do + expect(build_names).to contain_exactly('kics-iac-sast') + end + end + + context 'on merge request' do + let(:service) { MergeRequests::CreatePipelineService.new(project: project, current_user: user) } + let(:merge_request) { create(:merge_request, :simple, source_project: project) } + let(:pipeline) { service.execute(merge_request).payload } + + it 'has no jobs' do + expect(pipeline).to be_merge_request_event + expect(build_names).to be_empty + end + end + + context 'SAST_DISABLED is set' do + before do + create(:ci_variable, key: 'SAST_DISABLED', value: 'true', project: project) + end + + context 'on default branch' do + it 'has no jobs' do + expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError) + end + end + + context 'on feature branch' do + let(:pipeline_ref) { 'feature' } + + it 'has no jobs' do + expect { pipeline }.to raise_error(Ci::CreatePipelineService::CreateError) + end + end + end + end +end diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb index 7602309627b..64ef6ecd7f8 100644 --- 
a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb +++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb @@ -148,9 +148,7 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do it_behaves_like 'no Kubernetes deployment job' end - context 'when the project has an active cluster' do - let!(:cluster) { create(:cluster, :project, :provided_by_gcp, projects: [project]) } - + shared_examples 'pipeline with Kubernetes jobs' do describe 'deployment-related builds' do context 'on default branch' do it 'does not include rollout jobs besides production' do @@ -233,6 +231,22 @@ RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do end end end + + context 'when a cluster is attached' do + before do + create(:cluster, :project, :provided_by_gcp, projects: [project]) + end + + it_behaves_like 'pipeline with Kubernetes jobs' + end + + context 'when project has an Agent is present' do + before do + create(:cluster_agent, project: project) + end + + it_behaves_like 'pipeline with Kubernetes jobs' + end end describe 'buildpack detection' do diff --git a/spec/lib/gitlab/ci/templates/kaniko_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/kaniko_gitlab_ci_yaml_spec.rb new file mode 100644 index 00000000000..c7dbbea4622 --- /dev/null +++ b/spec/lib/gitlab/ci/templates/kaniko_gitlab_ci_yaml_spec.rb @@ -0,0 +1,25 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe 'Kaniko.gitlab-ci.yml' do + subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Kaniko') } + + describe 'the created pipeline' do + let(:pipeline_branch) { 'master' } + let(:project) { create(:project, :custom_repo, files: { 'Dockerfile' => 'FROM alpine:latest' }) } + let(:user) { project.owner } + let(:service) { Ci::CreatePipelineService.new(project, user, ref: pipeline_branch ) } + let(:pipeline) { service.execute!(:push).payload } + let(:build_names) { pipeline.builds.pluck(:name) } + + before do + stub_ci_pipeline_yaml_file(template.content) + 
allow(Ci::BuildScheduleWorker).to receive(:perform).and_return(true) + end + + it 'creates "kaniko-build" job' do + expect(build_names).to include('kaniko-build') + end + end +end diff --git a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb index 3d1306e82a5..fd5d5d6af7f 100644 --- a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb +++ b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb @@ -27,7 +27,7 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do context 'on master branch' do it 'creates init, validate and build jobs', :aggregate_failures do expect(pipeline.errors).to be_empty - expect(build_names).to include('init', 'validate', 'build', 'deploy') + expect(build_names).to include('validate', 'build', 'deploy') end end diff --git a/spec/lib/gitlab/ci/trace/archive_spec.rb b/spec/lib/gitlab/ci/trace/archive_spec.rb index c9fc4e720c4..5e965f94347 100644 --- a/spec/lib/gitlab/ci/trace/archive_spec.rb +++ b/spec/lib/gitlab/ci/trace/archive_spec.rb @@ -3,99 +3,134 @@ require 'spec_helper' RSpec.describe Gitlab::Ci::Trace::Archive do - let_it_be(:job) { create(:ci_build, :success, :trace_live) } - let_it_be_with_reload(:trace_metadata) { create(:ci_build_trace_metadata, build: job) } - let_it_be(:src_checksum) do - job.trace.read { |stream| Digest::MD5.hexdigest(stream.raw) } - end - - let(:metrics) { spy('metrics') } - - describe '#execute' do - subject { described_class.new(job, trace_metadata, metrics) } - - it 'computes and assigns checksum' do - Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream| - expect { subject.execute!(stream) }.to change { Ci::JobArtifact.count }.by(1) - end - - expect(trace_metadata.checksum).to eq(src_checksum) - expect(trace_metadata.trace_artifact).to eq(job.job_artifacts_trace) + context 'with transactional fixtures' do + let_it_be(:job) { create(:ci_build, :success, :trace_live) } + 
let_it_be_with_reload(:trace_metadata) { create(:ci_build_trace_metadata, build: job) } + let_it_be(:src_checksum) do + job.trace.read { |stream| Digest::MD5.hexdigest(stream.raw) } end - context 'validating artifact checksum' do - let(:trace) { 'abc' } - let(:stream) { StringIO.new(trace, 'rb') } - let(:src_checksum) { Digest::MD5.hexdigest(trace) } + let(:metrics) { spy('metrics') } - context 'when the object store is disabled' do - before do - stub_artifacts_object_storage(enabled: false) - end - - it 'skips validation' do - subject.execute!(stream) + describe '#execute' do + subject { described_class.new(job, trace_metadata, metrics) } - expect(trace_metadata.checksum).to eq(src_checksum) - expect(trace_metadata.remote_checksum).to be_nil - expect(metrics) - .not_to have_received(:increment_error_counter) - .with(type: :archive_invalid_checksum) + it 'computes and assigns checksum' do + Gitlab::Ci::Trace::ChunkedIO.new(job) do |stream| + expect { subject.execute!(stream) }.to change { Ci::JobArtifact.count }.by(1) end + + expect(trace_metadata.checksum).to eq(src_checksum) + expect(trace_metadata.trace_artifact).to eq(job.job_artifacts_trace) end - context 'with background_upload enabled' do - before do - stub_artifacts_object_storage(background_upload: true) - end + context 'validating artifact checksum' do + let(:trace) { 'abc' } + let(:stream) { StringIO.new(trace, 'rb') } + let(:src_checksum) { Digest::MD5.hexdigest(trace) } - it 'skips validation' do - subject.execute!(stream) + context 'when the object store is disabled' do + before do + stub_artifacts_object_storage(enabled: false) + end - expect(trace_metadata.checksum).to eq(src_checksum) - expect(trace_metadata.remote_checksum).to be_nil - expect(metrics) - .not_to have_received(:increment_error_counter) - .with(type: :archive_invalid_checksum) + it 'skips validation' do + subject.execute!(stream) + expect(trace_metadata.checksum).to eq(src_checksum) + expect(trace_metadata.remote_checksum).to be_nil 
+ expect(metrics) + .not_to have_received(:increment_error_counter) + .with(error_reason: :archive_invalid_checksum) + end end - end - context 'with direct_upload enabled' do - before do - stub_artifacts_object_storage(direct_upload: true) - end + context 'with background_upload enabled' do + before do + stub_artifacts_object_storage(background_upload: true) + end - it 'validates the archived trace' do - subject.execute!(stream) + it 'skips validation' do + subject.execute!(stream) - expect(trace_metadata.checksum).to eq(src_checksum) - expect(trace_metadata.remote_checksum).to eq(src_checksum) - expect(metrics) - .not_to have_received(:increment_error_counter) - .with(type: :archive_invalid_checksum) + expect(trace_metadata.checksum).to eq(src_checksum) + expect(trace_metadata.remote_checksum).to be_nil + expect(metrics) + .not_to have_received(:increment_error_counter) + .with(error_reason: :archive_invalid_checksum) + end end - context 'when the checksum does not match' do - let(:invalid_remote_checksum) { SecureRandom.hex } - + context 'with direct_upload enabled' do before do - expect(::Gitlab::Ci::Trace::RemoteChecksum) - .to receive(:new) - .with(an_instance_of(Ci::JobArtifact)) - .and_return(double(md5_checksum: invalid_remote_checksum)) + stub_artifacts_object_storage(direct_upload: true) end it 'validates the archived trace' do subject.execute!(stream) expect(trace_metadata.checksum).to eq(src_checksum) - expect(trace_metadata.remote_checksum).to eq(invalid_remote_checksum) + expect(trace_metadata.remote_checksum).to eq(src_checksum) expect(metrics) - .to have_received(:increment_error_counter) - .with(type: :archive_invalid_checksum) + .not_to have_received(:increment_error_counter) + .with(error_reason: :archive_invalid_checksum) + end + + context 'when the checksum does not match' do + let(:invalid_remote_checksum) { SecureRandom.hex } + + before do + expect(::Gitlab::Ci::Trace::RemoteChecksum) + .to receive(:new) + 
.with(an_instance_of(Ci::JobArtifact)) + .and_return(double(md5_checksum: invalid_remote_checksum)) + end + + it 'validates the archived trace' do + subject.execute!(stream) + + expect(trace_metadata.checksum).to eq(src_checksum) + expect(trace_metadata.remote_checksum).to eq(invalid_remote_checksum) + expect(metrics) + .to have_received(:increment_error_counter) + .with(error_reason: :archive_invalid_checksum) + end end end end end end + + context 'without transactional fixtures', :delete do + let(:job) { create(:ci_build, :success, :trace_live) } + let(:trace_metadata) { create(:ci_build_trace_metadata, build: job) } + let(:stream) { StringIO.new('abc', 'rb') } + + describe '#execute!' do + subject(:execute) do + ::Gitlab::Ci::Trace::Archive.new(job, trace_metadata).execute!(stream) + end + + before do + stub_artifacts_object_storage(direct_upload: true) + end + + it 'does not upload the trace inside a database transaction', :delete do + expect(Ci::ApplicationRecord.connection.transaction_open?).to be_falsey + + allow_next_instance_of(Ci::JobArtifact) do |artifact| + artifact.job_id = job.id + + expect(artifact) + .to receive(:store_file!) 
+ .and_wrap_original do |store_method, *args| + expect(Ci::ApplicationRecord.connection.transaction_open?).to be_falsey + + store_method.call(*args) + end + end + + execute + end + end + end end diff --git a/spec/lib/gitlab/ci/trace/metrics_spec.rb b/spec/lib/gitlab/ci/trace/metrics_spec.rb index 53e55a57973..733ffbbea22 100644 --- a/spec/lib/gitlab/ci/trace/metrics_spec.rb +++ b/spec/lib/gitlab/ci/trace/metrics_spec.rb @@ -17,23 +17,23 @@ RSpec.describe Gitlab::Ci::Trace::Metrics, :prometheus do end describe '#increment_error_counter' do - context 'when the operation type is known' do + context 'when the error reason is known' do it 'increments the counter' do - subject.increment_error_counter(type: :chunks_invalid_size) - subject.increment_error_counter(type: :chunks_invalid_checksum) - subject.increment_error_counter(type: :archive_invalid_checksum) + subject.increment_error_counter(error_reason: :chunks_invalid_size) + subject.increment_error_counter(error_reason: :chunks_invalid_checksum) + subject.increment_error_counter(error_reason: :archive_invalid_checksum) - expect(described_class.trace_errors_counter.get(type: :chunks_invalid_size)).to eq 1 - expect(described_class.trace_errors_counter.get(type: :chunks_invalid_checksum)).to eq 1 - expect(described_class.trace_errors_counter.get(type: :archive_invalid_checksum)).to eq 1 + expect(described_class.trace_errors_counter.get(error_reason: :chunks_invalid_size)).to eq 1 + expect(described_class.trace_errors_counter.get(error_reason: :chunks_invalid_checksum)).to eq 1 + expect(described_class.trace_errors_counter.get(error_reason: :archive_invalid_checksum)).to eq 1 expect(described_class.trace_errors_counter.values.count).to eq 3 end end - context 'when the operation type is known' do + context 'when the error reason is unknown' do it 'raises an exception' do - expect { subject.increment_error_counter(type: :invalid_type) } + expect { subject.increment_error_counter(error_reason: :invalid_type) } .to 
raise_error(ArgumentError) end end diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb index 1a31b2dad56..888ceb7ff9a 100644 --- a/spec/lib/gitlab/ci/trace_spec.rb +++ b/spec/lib/gitlab/ci/trace_spec.rb @@ -25,16 +25,6 @@ RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_defa artifact1.file.migrate!(ObjectStorage::Store::REMOTE) end - it 'reloads the trace after is it migrated' do - stub_const('Gitlab::HttpIO::BUFFER_SIZE', test_data.length) - - expect_next_instance_of(Gitlab::HttpIO) do |http_io| - expect(http_io).to receive(:get_chunk).and_return(test_data, "") - end - - expect(artifact2.job.trace.raw).to eq(test_data) - end - it 'reloads the trace in case of a chunk error' do chunk_error = described_class::ChunkedIO::FailedToGetChunkError diff --git a/spec/lib/gitlab/ci/variables/builder_spec.rb b/spec/lib/gitlab/ci/variables/builder_spec.rb new file mode 100644 index 00000000000..10275f33484 --- /dev/null +++ b/spec/lib/gitlab/ci/variables/builder_spec.rb @@ -0,0 +1,38 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Ci::Variables::Builder do + let(:builder) { described_class.new(pipeline) } + let(:pipeline) { create(:ci_pipeline) } + let(:job) { create(:ci_build, pipeline: pipeline) } + + describe '#scoped_variables' do + let(:environment) { job.expanded_environment_name } + let(:dependencies) { true } + + subject { builder.scoped_variables(job, environment: environment, dependencies: dependencies) } + + it 'returns the expected variables' do + keys = %w[CI_JOB_NAME + CI_JOB_STAGE + CI_NODE_TOTAL + CI_BUILD_NAME + CI_BUILD_STAGE] + + subject.map { |env| env[:key] }.tap do |names| + expect(names).to include(*keys) + end + end + + context 'feature flag disabled' do + before do + stub_feature_flags(ci_predefined_vars_in_builder: false) + end + + it 'returns no variables' do + expect(subject.map { |env| env[:key] }).to be_empty + end + end + end +end diff --git 
a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb index 7ba98380986..26c560565e0 100644 --- a/spec/lib/gitlab/ci/variables/collection_spec.rb +++ b/spec/lib/gitlab/ci/variables/collection_spec.rb @@ -358,302 +358,210 @@ RSpec.describe Gitlab::Ci::Variables::Collection do end describe '#sort_and_expand_all' do - context 'when FF :variable_inside_variable is disabled' do - let_it_be(:project_with_flag_disabled) { create(:project) } - let_it_be(:project_with_flag_enabled) { create(:project) } - - before do - stub_feature_flags(variable_inside_variable: [project_with_flag_enabled]) - end + context 'table tests' do + using RSpec::Parameterized::TableSyntax - context 'table tests' do - using RSpec::Parameterized::TableSyntax - - where do - { - "empty array": { - variables: [], - keep_undefined: false - }, - "simple expansions": { - variables: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result' }, - { key: 'variable3', value: 'key$variable$variable2' } - ], - keep_undefined: false - }, - "complex expansion": { - variables: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'key${variable}' } - ], - keep_undefined: false - }, - "out-of-order variable reference": { - variables: [ - { key: 'variable2', value: 'key${variable}' }, - { key: 'variable', value: 'value' } - ], - keep_undefined: false - }, - "complex expansions with raw variable": { - variables: [ - { key: 'variable3', value: 'key_${variable}_${variable2}' }, - { key: 'variable', value: '$variable2', raw: true }, - { key: 'variable2', value: 'value2' } - ], - keep_undefined: false - }, - "escaped characters in complex expansions are kept intact": { - variables: [ - { key: 'variable3', value: 'key_${variable}_$${HOME}_%%HOME%%' }, - { key: 'variable', value: '$variable2' }, - { key: 'variable2', value: 'value2' } - ], - keep_undefined: false - }, - "array with cyclic dependency": { - variables: [ - { key: 'variable', 
value: '$variable2' }, - { key: 'variable2', value: '$variable3' }, - { key: 'variable3', value: 'key$variable$variable2' } - ], - keep_undefined: true - } + where do + { + "empty array": { + variables: [], + keep_undefined: false, + result: [] + }, + "simple expansions": { + variables: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'result' }, + { key: 'variable3', value: 'key$variable$variable2' }, + { key: 'variable4', value: 'key$variable$variable3' } + ], + keep_undefined: false, + result: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'result' }, + { key: 'variable3', value: 'keyvalueresult' }, + { key: 'variable4', value: 'keyvaluekeyvalueresult' } + ] + }, + "complex expansion": { + variables: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'key${variable}' } + ], + keep_undefined: false, + result: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'keyvalue' } + ] + }, + "unused variables": { + variables: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'result2' }, + { key: 'variable3', value: 'result3' }, + { key: 'variable4', value: 'key$variable$variable3' } + ], + keep_undefined: false, + result: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'result2' }, + { key: 'variable3', value: 'result3' }, + { key: 'variable4', value: 'keyvalueresult3' } + ] + }, + "complex expansions": { + variables: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'result' }, + { key: 'variable3', value: 'key${variable}${variable2}' } + ], + keep_undefined: false, + result: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'result' }, + { key: 'variable3', value: 'keyvalueresult' } + ] + }, + "escaped characters in complex expansions keeping undefined are kept intact": { + variables: [ + { key: 'variable3', value: 'key_${variable}_$${HOME}_%%HOME%%' }, + { key: 'variable', value: '$variable2' }, + { key: 
'variable2', value: 'value' } + ], + keep_undefined: true, + result: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'value' }, + { key: 'variable3', value: 'key_value_$${HOME}_%%HOME%%' } + ] + }, + "escaped characters in complex expansions discarding undefined are kept intact": { + variables: [ + { key: 'variable2', value: 'key_${variable4}_$${HOME}_%%HOME%%' }, + { key: 'variable', value: 'value_$${HOME}_%%HOME%%' } + ], + keep_undefined: false, + result: [ + { key: 'variable', value: 'value_$${HOME}_%%HOME%%' }, + { key: 'variable2', value: 'key__$${HOME}_%%HOME%%' } + ] + }, + "out-of-order expansion": { + variables: [ + { key: 'variable3', value: 'key$variable2$variable' }, + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'result' } + ], + keep_undefined: false, + result: [ + { key: 'variable2', value: 'result' }, + { key: 'variable', value: 'value' }, + { key: 'variable3', value: 'keyresultvalue' } + ] + }, + "out-of-order complex expansion": { + variables: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'result' }, + { key: 'variable3', value: 'key${variable2}${variable}' } + ], + keep_undefined: false, + result: [ + { key: 'variable', value: 'value' }, + { key: 'variable2', value: 'result' }, + { key: 'variable3', value: 'keyresultvalue' } + ] + }, + "missing variable discarding original": { + variables: [ + { key: 'variable2', value: 'key$variable' } + ], + keep_undefined: false, + result: [ + { key: 'variable2', value: 'key' } + ] + }, + "missing variable keeping original": { + variables: [ + { key: 'variable2', value: 'key$variable' } + ], + keep_undefined: true, + result: [ + { key: 'variable2', value: 'key$variable' } + ] + }, + "complex expansions with missing variable keeping original": { + variables: [ + { key: 'variable4', value: 'key${variable}${variable2}${variable3}' }, + { key: 'variable', value: 'value' }, + { key: 'variable3', value: 'value3' } + ], + keep_undefined: true, + 
result: [ + { key: 'variable', value: 'value' }, + { key: 'variable3', value: 'value3' }, + { key: 'variable4', value: 'keyvalue${variable2}value3' } + ] + }, + "complex expansions with raw variable": { + variables: [ + { key: 'variable3', value: 'key_${variable}_${variable2}' }, + { key: 'variable', value: '$variable2', raw: true }, + { key: 'variable2', value: 'value2' } + ], + keep_undefined: false, + result: [ + { key: 'variable', value: '$variable2', raw: true }, + { key: 'variable2', value: 'value2' }, + { key: 'variable3', value: 'key_$variable2_value2' } + ] + }, + "variable value referencing password with special characters": { + variables: [ + { key: 'VAR', value: '$PASSWORD' }, + { key: 'PASSWORD', value: 'my_password$$_%%_$A' }, + { key: 'A', value: 'value' } + ], + keep_undefined: false, + result: [ + { key: 'VAR', value: 'my_password$$_%%_value' }, + { key: 'PASSWORD', value: 'my_password$$_%%_value' }, + { key: 'A', value: 'value' } + ] + }, + "cyclic dependency causes original array to be returned": { + variables: [ + { key: 'variable', value: '$variable2' }, + { key: 'variable2', value: '$variable3' }, + { key: 'variable3', value: 'key$variable$variable2' } + ], + keep_undefined: false, + result: [ + { key: 'variable', value: '$variable2' }, + { key: 'variable2', value: '$variable3' }, + { key: 'variable3', value: 'key$variable$variable2' } + ] } - end - - with_them do - let(:collection) { Gitlab::Ci::Variables::Collection.new(variables, keep_undefined: keep_undefined) } - - subject { collection.sort_and_expand_all(project_with_flag_disabled) } - - it 'returns Collection' do - is_expected.to be_an_instance_of(Gitlab::Ci::Variables::Collection) - end - - it 'does not expand variables' do - var_hash = variables.pluck(:key, :value).to_h - expect(subject.to_hash).to eq(var_hash) - end - end + } end - end - context 'when FF :variable_inside_variable is enabled' do - let_it_be(:project_with_flag_disabled) { create(:project) } - 
let_it_be(:project_with_flag_enabled) { create(:project) } + with_them do + let(:collection) { Gitlab::Ci::Variables::Collection.new(variables) } - before do - stub_feature_flags(variable_inside_variable: [project_with_flag_enabled]) - end + subject { collection.sort_and_expand_all(keep_undefined: keep_undefined) } - context 'table tests' do - using RSpec::Parameterized::TableSyntax - - where do - { - "empty array": { - variables: [], - keep_undefined: false, - result: [] - }, - "simple expansions": { - variables: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result' }, - { key: 'variable3', value: 'key$variable$variable2' }, - { key: 'variable4', value: 'key$variable$variable3' } - ], - keep_undefined: false, - result: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result' }, - { key: 'variable3', value: 'keyvalueresult' }, - { key: 'variable4', value: 'keyvaluekeyvalueresult' } - ] - }, - "complex expansion": { - variables: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'key${variable}' } - ], - keep_undefined: false, - result: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'keyvalue' } - ] - }, - "unused variables": { - variables: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result2' }, - { key: 'variable3', value: 'result3' }, - { key: 'variable4', value: 'key$variable$variable3' } - ], - keep_undefined: false, - result: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result2' }, - { key: 'variable3', value: 'result3' }, - { key: 'variable4', value: 'keyvalueresult3' } - ] - }, - "complex expansions": { - variables: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result' }, - { key: 'variable3', value: 'key${variable}${variable2}' } - ], - keep_undefined: false, - result: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result' }, - { key: 'variable3', value: 'keyvalueresult' 
} - ] - }, - "escaped characters in complex expansions keeping undefined are kept intact": { - variables: [ - { key: 'variable3', value: 'key_${variable}_$${HOME}_%%HOME%%' }, - { key: 'variable', value: '$variable2' }, - { key: 'variable2', value: 'value' } - ], - keep_undefined: true, - result: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'value' }, - { key: 'variable3', value: 'key_value_$${HOME}_%%HOME%%' } - ] - }, - "escaped characters in complex expansions discarding undefined are kept intact": { - variables: [ - { key: 'variable2', value: 'key_${variable4}_$${HOME}_%%HOME%%' }, - { key: 'variable', value: 'value_$${HOME}_%%HOME%%' } - ], - keep_undefined: false, - result: [ - { key: 'variable', value: 'value_$${HOME}_%%HOME%%' }, - { key: 'variable2', value: 'key__$${HOME}_%%HOME%%' } - ] - }, - "out-of-order expansion": { - variables: [ - { key: 'variable3', value: 'key$variable2$variable' }, - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result' } - ], - keep_undefined: false, - result: [ - { key: 'variable2', value: 'result' }, - { key: 'variable', value: 'value' }, - { key: 'variable3', value: 'keyresultvalue' } - ] - }, - "out-of-order complex expansion": { - variables: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result' }, - { key: 'variable3', value: 'key${variable2}${variable}' } - ], - keep_undefined: false, - result: [ - { key: 'variable', value: 'value' }, - { key: 'variable2', value: 'result' }, - { key: 'variable3', value: 'keyresultvalue' } - ] - }, - "missing variable discarding original": { - variables: [ - { key: 'variable2', value: 'key$variable' } - ], - keep_undefined: false, - result: [ - { key: 'variable2', value: 'key' } - ] - }, - "missing variable keeping original": { - variables: [ - { key: 'variable2', value: 'key$variable' } - ], - keep_undefined: true, - result: [ - { key: 'variable2', value: 'key$variable' } - ] - }, - "complex expansions with missing 
variable keeping original": { - variables: [ - { key: 'variable4', value: 'key${variable}${variable2}${variable3}' }, - { key: 'variable', value: 'value' }, - { key: 'variable3', value: 'value3' } - ], - keep_undefined: true, - result: [ - { key: 'variable', value: 'value' }, - { key: 'variable3', value: 'value3' }, - { key: 'variable4', value: 'keyvalue${variable2}value3' } - ] - }, - "complex expansions with raw variable": { - variables: [ - { key: 'variable3', value: 'key_${variable}_${variable2}' }, - { key: 'variable', value: '$variable2', raw: true }, - { key: 'variable2', value: 'value2' } - ], - keep_undefined: false, - result: [ - { key: 'variable', value: '$variable2', raw: true }, - { key: 'variable2', value: 'value2' }, - { key: 'variable3', value: 'key_$variable2_value2' } - ] - }, - "variable value referencing password with special characters": { - variables: [ - { key: 'VAR', value: '$PASSWORD' }, - { key: 'PASSWORD', value: 'my_password$$_%%_$A' }, - { key: 'A', value: 'value' } - ], - keep_undefined: false, - result: [ - { key: 'VAR', value: 'my_password$$_%%_value' }, - { key: 'PASSWORD', value: 'my_password$$_%%_value' }, - { key: 'A', value: 'value' } - ] - }, - "cyclic dependency causes original array to be returned": { - variables: [ - { key: 'variable', value: '$variable2' }, - { key: 'variable2', value: '$variable3' }, - { key: 'variable3', value: 'key$variable$variable2' } - ], - keep_undefined: false, - result: [ - { key: 'variable', value: '$variable2' }, - { key: 'variable2', value: '$variable3' }, - { key: 'variable3', value: 'key$variable$variable2' } - ] - } - } + it 'returns Collection' do + is_expected.to be_an_instance_of(Gitlab::Ci::Variables::Collection) end - with_them do - let(:collection) { Gitlab::Ci::Variables::Collection.new(variables) } - - subject { collection.sort_and_expand_all(project_with_flag_enabled, keep_undefined: keep_undefined) } - - it 'returns Collection' do - is_expected.to 
be_an_instance_of(Gitlab::Ci::Variables::Collection) - end - - it 'expands variables' do - var_hash = result.to_h { |env| [env.fetch(:key), env.fetch(:value)] } - .with_indifferent_access - expect(subject.to_hash).to eq(var_hash) - end + it 'expands variables' do + var_hash = result.to_h { |env| [env.fetch(:key), env.fetch(:value)] } + .with_indifferent_access + expect(subject.to_hash).to eq(var_hash) + end - it 'preserves raw attribute' do - expect(subject.pluck(:key, :raw).to_h).to eq(collection.pluck(:key, :raw).to_h) - end + it 'preserves raw attribute' do + expect(subject.pluck(:key, :raw).to_h).to eq(collection.pluck(:key, :raw).to_h) end end end diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb index 1591c2e6b60..f00a801286d 100644 --- a/spec/lib/gitlab/ci/yaml_processor_spec.rb +++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb @@ -1046,6 +1046,64 @@ module Gitlab end end + context 'when overriding `extends`' do + let(:config) do + <<~YAML + .base: + script: test + variables: + VAR1: base var 1 + + test1: + extends: .base + variables: + VAR1: test1 var 1 + VAR2: test2 var 2 + + test2: + extends: .base + variables: + VAR2: test2 var 2 + + test3: + extends: .base + variables: {} + + test4: + extends: .base + variables: null + YAML + end + + it 'correctly extends jobs' do + expect(config_processor.builds[0]).to include( + name: 'test1', + options: { script: ['test'] }, + job_variables: [{ key: 'VAR1', value: 'test1 var 1', public: true }, + { key: 'VAR2', value: 'test2 var 2', public: true }] + ) + + expect(config_processor.builds[1]).to include( + name: 'test2', + options: { script: ['test'] }, + job_variables: [{ key: 'VAR1', value: 'base var 1', public: true }, + { key: 'VAR2', value: 'test2 var 2', public: true }] + ) + + expect(config_processor.builds[2]).to include( + name: 'test3', + options: { script: ['test'] }, + job_variables: [{ key: 'VAR1', value: 'base var 1', public: true }] + ) + + 
expect(config_processor.builds[3]).to include( + name: 'test4', + options: { script: ['test'] }, + job_variables: [] + ) + end + end + context 'when using recursive `extends`' do let(:config) do <<~YAML diff --git a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb index 5a4e9001ac9..933b6d6be9e 100644 --- a/spec/lib/gitlab/config_checker/external_database_checker_spec.rb +++ b/spec/lib/gitlab/config_checker/external_database_checker_spec.rb @@ -8,7 +8,7 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do context 'when database meets minimum supported version' do before do - allow(Gitlab::Database.main).to receive(:postgresql_minimum_supported_version?).and_return(true) + allow(ApplicationRecord.database).to receive(:postgresql_minimum_supported_version?).and_return(true) end it { is_expected.to be_empty } @@ -16,7 +16,7 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do context 'when database does not meet minimum supported version' do before do - allow(Gitlab::Database.main).to receive(:postgresql_minimum_supported_version?).and_return(false) + allow(ApplicationRecord.database).to receive(:postgresql_minimum_supported_version?).and_return(false) end let(:notice_deprecated_database) do @@ -26,7 +26,7 @@ RSpec.describe Gitlab::ConfigChecker::ExternalDatabaseChecker do '%{pg_version_minimum} is required for this version of GitLab. 
' \ 'Please upgrade your environment to a supported PostgreSQL version, ' \ 'see %{pg_requirements_url} for details.') % { - pg_version_current: Gitlab::Database.main.version, + pg_version_current: ApplicationRecord.database.version, pg_version_minimum: Gitlab::Database::MINIMUM_POSTGRES_VERSION, pg_requirements_url: '<a href="https://docs.gitlab.com/ee/install/requirements.html#database">database requirements</a>' } diff --git a/spec/lib/gitlab/container_repository/tags/cache_spec.rb b/spec/lib/gitlab/container_repository/tags/cache_spec.rb new file mode 100644 index 00000000000..f84c1ce173f --- /dev/null +++ b/spec/lib/gitlab/container_repository/tags/cache_spec.rb @@ -0,0 +1,133 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe ::Gitlab::ContainerRepository::Tags::Cache, :clean_gitlab_redis_cache do + let_it_be(:dummy_tag_class) { Struct.new(:name, :created_at) } + let_it_be(:repository) { create(:container_repository) } + + let(:tags) { create_tags(5) } + let(:service) { described_class.new(repository) } + + shared_examples 'not interacting with redis' do + it 'does not interact with redis' do + expect(::Gitlab::Redis::Cache).not_to receive(:with) + + subject + end + end + + describe '#populate' do + subject { service.populate(tags) } + + context 'with tags' do + it 'gets values from redis' do + expect(::Gitlab::Redis::Cache).to receive(:with).and_call_original + + expect(subject).to eq(0) + + tags.each { |t| expect(t.created_at).to eq(nil) } + end + + context 'with cached values' do + let(:cached_tags) { tags.first(2) } + + before do + ::Gitlab::Redis::Cache.with do |redis| + cached_tags.each do |tag| + redis.set(cache_key(tag), rfc3339(10.days.ago)) + end + end + end + + it 'gets values from redis' do + expect(::Gitlab::Redis::Cache).to receive(:with).and_call_original + + expect(subject).to eq(2) + + cached_tags.each { |t| expect(t.created_at).not_to eq(nil) } + (tags - cached_tags).each { |t| expect(t.created_at).to eq(nil) } + 
end + end + end + + context 'with no tags' do + let(:tags) { [] } + + it_behaves_like 'not interacting with redis' + end + end + + describe '#insert' do + let(:max_ttl) { 90.days } + + subject { service.insert(tags, max_ttl) } + + context 'with tags' do + let(:tag) { tags.first } + let(:ttl) { 90.days - 3.days } + + before do + travel_to(Time.zone.local(2021, 9, 2, 12, 0, 0)) + + tag.created_at = DateTime.rfc3339(3.days.ago.rfc3339) + end + + after do + travel_back + end + + it 'inserts values in redis' do + ::Gitlab::Redis::Cache.with do |redis| + expect(redis) + .to receive(:set) + .with(cache_key(tag), rfc3339(tag.created_at), ex: ttl.to_i) + .and_call_original + end + + subject + end + + context 'with some of them already cached' do + let(:tag) { tags.first } + + before do + ::Gitlab::Redis::Cache.with do |redis| + redis.set(cache_key(tag), rfc3339(10.days.ago)) + end + service.populate(tags) + end + + it_behaves_like 'not interacting with redis' + end + end + + context 'with no tags' do + let(:tags) { [] } + + it_behaves_like 'not interacting with redis' + end + + context 'with no expires_in' do + let(:max_ttl) { nil } + + it_behaves_like 'not interacting with redis' + end + end + + def create_tags(size) + Array.new(size) do |i| + dummy_tag_class.new("Tag #{i}", nil) + end + end + + def cache_key(tag) + "container_repository:{#{repository.id}}:tag:#{tag.name}:created_at" + end + + def rfc3339(date_time) + # DateTime rfc3339 is different ActiveSupport::TimeWithZone rfc3339 + # The caching will use DateTime rfc3339 + DateTime.rfc3339(date_time.rfc3339).rfc3339 + end +end diff --git a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb index 3ec332dace5..c0476d38380 100644 --- a/spec/lib/gitlab/content_security_policy/config_loader_spec.rb +++ b/spec/lib/gitlab/content_security_policy/config_loader_spec.rb @@ -50,7 +50,7 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do 
expect(directives.has_key?('report_uri')).to be_truthy expect(directives['report_uri']).to be_nil - expect(directives['child_src']).to eq(directives['frame_src']) + expect(directives['child_src']).to eq("#{directives['frame_src']} #{directives['worker_src']}") end context 'adds all websocket origins to support Safari' do @@ -77,13 +77,15 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do context 'when CDN host is defined' do before do - stub_config_setting(cdn_host: 'https://example.com') + stub_config_setting(cdn_host: 'https://cdn.example.com') end it 'adds CDN host to CSP' do - expect(directives['script_src']).to eq("'strict-dynamic' 'self' 'unsafe-inline' 'unsafe-eval' https://www.google.com/recaptcha/ https://www.recaptcha.net https://apis.google.com https://example.com") - expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://example.com") - expect(directives['font_src']).to eq("'self' https://example.com") + expect(directives['script_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.script_src + " https://cdn.example.com") + expect(directives['style_src']).to eq("'self' 'unsafe-inline' https://cdn.example.com") + expect(directives['font_src']).to eq("'self' https://cdn.example.com") + expect(directives['worker_src']).to eq('http://localhost/assets/ blob: data: https://cdn.example.com') + expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " https://cdn.example.com http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html") end end @@ -99,8 +101,10 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do end context 'when CUSTOMER_PORTAL_URL is set' do + let(:customer_portal_url) { 'https://customers.example.com' } + before do - stub_env('CUSTOMER_PORTAL_URL', 'https://customers.example.com') + stub_env('CUSTOMER_PORTAL_URL', customer_portal_url) end context 'when in production' do @@ -109,7 +113,7 @@ RSpec.describe 
Gitlab::ContentSecurityPolicy::ConfigLoader do end it 'does not add CUSTOMER_PORTAL_URL to CSP' do - expect(directives['frame_src']).to eq("'self' https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com") + expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html") end end @@ -119,7 +123,36 @@ RSpec.describe Gitlab::ContentSecurityPolicy::ConfigLoader do end it 'adds CUSTOMER_PORTAL_URL to CSP' do - expect(directives['frame_src']).to eq("'self' https://www.google.com/recaptcha/ https://www.recaptcha.net/ https://content.googleapis.com https://content-compute.googleapis.com https://content-cloudbilling.googleapis.com https://content-cloudresourcemanager.googleapis.com https://customers.example.com") + expect(directives['frame_src']).to eq(::Gitlab::ContentSecurityPolicy::Directives.frame_src + " http://localhost/rails/letter_opener/ https://customers.example.com http://localhost/admin/sidekiq http://localhost/admin/sidekiq/ http://localhost/-/speedscope/index.html") + end + end + end + + context 'letter_opener applicaiton URL' do + let(:gitlab_url) { 'http://gitlab.example.com' } + let(:letter_opener_url) { "#{gitlab_url}/rails/letter_opener/" } + + before do + stub_config_setting(url: gitlab_url) + end + + context 'when in production' do + before do + allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('production')) + end + + it 'does not add letter_opener to CSP' do + expect(directives['frame_src']).not_to include(letter_opener_url) + end + end + + context 'when in development' do + before do + allow(Rails).to receive(:env).and_return(ActiveSupport::StringInquirer.new('development')) + end + + it 'adds letter_opener to CSP' do + 
expect(directives['frame_src']).to include(letter_opener_url) end end end diff --git a/spec/lib/gitlab/contributions_calendar_spec.rb b/spec/lib/gitlab/contributions_calendar_spec.rb index 67b2ea7a1d4..384609c6664 100644 --- a/spec/lib/gitlab/contributions_calendar_spec.rb +++ b/spec/lib/gitlab/contributions_calendar_spec.rb @@ -5,6 +5,7 @@ require 'spec_helper' RSpec.describe Gitlab::ContributionsCalendar do let(:contributor) { create(:user) } let(:user) { create(:user) } + let(:travel_time) { nil } let(:private_project) do create(:project, :private) do |project| @@ -31,7 +32,7 @@ RSpec.describe Gitlab::ContributionsCalendar do let(:last_year) { today - 1.year } before do - travel_to Time.now.utc.end_of_day + travel_to travel_time || Time.now.utc.end_of_day end after do @@ -89,7 +90,7 @@ RSpec.describe Gitlab::ContributionsCalendar do expect(calendar(contributor).activity_dates[today]).to eq(2) end - context "when events fall under different dates depending on the time zone" do + context "when events fall under different dates depending on the system time zone" do before do create_event(public_project, today, 1) create_event(public_project, today, 4) @@ -116,6 +117,37 @@ RSpec.describe Gitlab::ContributionsCalendar do end end end + + context "when events fall under different dates depending on the contributor's time zone" do + before do + create_event(public_project, today, 1) + create_event(public_project, today, 4) + create_event(public_project, today, 10) + create_event(public_project, today, 16) + create_event(public_project, today, 23) + end + + it "renders correct event counts within the UTC timezone" do + Time.use_zone('UTC') do + contributor.timezone = 'UTC' + expect(calendar.activity_dates).to eq(today => 5) + end + end + + it "renders correct event counts within the Sydney timezone" do + Time.use_zone('UTC') do + contributor.timezone = 'Sydney' + expect(calendar.activity_dates).to eq(today => 3, tomorrow => 2) + end + end + + it "renders correct event 
counts within the US Central timezone" do + Time.use_zone('UTC') do + contributor.timezone = 'Central Time (US & Canada)' + expect(calendar.activity_dates).to eq(yesterday => 2, today => 3) + end + end + end end describe '#events_by_date' do @@ -152,14 +184,38 @@ RSpec.describe Gitlab::ContributionsCalendar do end describe '#starting_year' do - it "is the start of last year" do - expect(calendar.starting_year).to eq(last_year.year) + let(:travel_time) { Time.find_zone('UTC').local(2020, 12, 31, 19, 0, 0) } + + context "when the contributor's timezone is not set" do + it "is the start of last year in the system timezone" do + expect(calendar.starting_year).to eq(2019) + end + end + + context "when the contributor's timezone is set to Sydney" do + let(:contributor) { create(:user, { timezone: 'Sydney' }) } + + it "is the start of last year in Sydney" do + expect(calendar.starting_year).to eq(2020) + end end end describe '#starting_month' do - it "is the start of this month" do - expect(calendar.starting_month).to eq(today.month) + let(:travel_time) { Time.find_zone('UTC').local(2020, 12, 31, 19, 0, 0) } + + context "when the contributor's timezone is not set" do + it "is the start of this month in the system timezone" do + expect(calendar.starting_month).to eq(12) + end + end + + context "when the contributor's timezone is set to Sydney" do + let(:contributor) { create(:user, { timezone: 'Sydney' }) } + + it "is the start of this month in Sydney" do + expect(calendar.starting_month).to eq(1) + end end end end diff --git a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb index 434cba4edde..223730f87c0 100644 --- a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb +++ b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb @@ -3,6 +3,8 @@ require 'spec_helper' RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model do + it { 
is_expected.to be_a Gitlab::Database::SharedModel } + describe 'validations' do let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH } let(:definition_limit) { described_class::MAX_DEFINITION_LENGTH } diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb index 779e8e40c97..04c18a98ee6 100644 --- a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb +++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb @@ -286,7 +286,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do let(:migration_wrapper) { Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper.new } let(:migration_helpers) { ActiveRecord::Migration.new } - let(:table_name) { :_batched_migrations_test_table } + let(:table_name) { :_test_batched_migrations_test_table } let(:column_name) { :some_id } let(:job_arguments) { [:some_id, :some_id_convert_to_bigint] } diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb index da13bc425d1..9831510f014 100644 --- a/spec/lib/gitlab/database/batch_count_spec.rb +++ b/spec/lib/gitlab/database/batch_count_spec.rb @@ -19,7 +19,7 @@ RSpec.describe Gitlab::Database::BatchCount do end before do - allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(in_transaction) + allow(model.connection).to receive(:transaction_open?).and_return(in_transaction) end def calculate_batch_size(batch_size) diff --git a/spec/lib/gitlab/database/connection_spec.rb b/spec/lib/gitlab/database/connection_spec.rb deleted file mode 100644 index ee1df141cd6..00000000000 --- a/spec/lib/gitlab/database/connection_spec.rb +++ /dev/null @@ -1,442 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Database::Connection do - let(:connection) { described_class.new } - - 
describe '#config' do - it 'returns a HashWithIndifferentAccess' do - expect(connection.config).to be_an_instance_of(HashWithIndifferentAccess) - end - - it 'returns a default pool size' do - expect(connection.config) - .to include(pool: Gitlab::Database.default_pool_size) - end - - it 'does not cache its results' do - a = connection.config - b = connection.config - - expect(a).not_to equal(b) - end - end - - describe '#pool_size' do - context 'when no explicit size is configured' do - it 'returns the default pool size' do - expect(connection).to receive(:config).and_return({ pool: nil }) - - expect(connection.pool_size).to eq(Gitlab::Database.default_pool_size) - end - end - - context 'when an explicit pool size is set' do - it 'returns the pool size' do - expect(connection).to receive(:config).and_return({ pool: 4 }) - - expect(connection.pool_size).to eq(4) - end - end - end - - describe '#username' do - context 'when a username is set' do - it 'returns the username' do - allow(connection).to receive(:config).and_return(username: 'bob') - - expect(connection.username).to eq('bob') - end - end - - context 'when a username is not set' do - it 'returns the value of the USER environment variable' do - allow(connection).to receive(:config).and_return(username: nil) - allow(ENV).to receive(:[]).with('USER').and_return('bob') - - expect(connection.username).to eq('bob') - end - end - end - - describe '#database_name' do - it 'returns the name of the database' do - allow(connection).to receive(:config).and_return(database: 'test') - - expect(connection.database_name).to eq('test') - end - end - - describe '#adapter_name' do - it 'returns the database adapter name' do - allow(connection).to receive(:config).and_return(adapter: 'test') - - expect(connection.adapter_name).to eq('test') - end - end - - describe '#human_adapter_name' do - context 'when the adapter is PostgreSQL' do - it 'returns PostgreSQL' do - allow(connection).to receive(:config).and_return(adapter: 
'postgresql') - - expect(connection.human_adapter_name).to eq('PostgreSQL') - end - end - - context 'when the adapter is not PostgreSQL' do - it 'returns Unknown' do - allow(connection).to receive(:config).and_return(adapter: 'kittens') - - expect(connection.human_adapter_name).to eq('Unknown') - end - end - end - - describe '#postgresql?' do - context 'when using PostgreSQL' do - it 'returns true' do - allow(connection).to receive(:adapter_name).and_return('PostgreSQL') - - expect(connection.postgresql?).to eq(true) - end - end - - context 'when not using PostgreSQL' do - it 'returns false' do - allow(connection).to receive(:adapter_name).and_return('MySQL') - - expect(connection.postgresql?).to eq(false) - end - end - end - - describe '#db_config_with_default_pool_size' do - it 'returns db_config with our default pool size' do - allow(Gitlab::Database).to receive(:default_pool_size).and_return(9) - - expect(connection.db_config_with_default_pool_size.pool).to eq(9) - end - - it 'returns db_config with the correct database name' do - db_name = connection.scope.connection.pool.db_config.name - - expect(connection.db_config_with_default_pool_size.name).to eq(db_name) - end - end - - describe '#disable_prepared_statements', :reestablished_active_record_base do - it 'disables prepared statements' do - connection.scope.establish_connection( - ::Gitlab::Database.main.config.merge(prepared_statements: true) - ) - - expect(connection.scope.connection.prepared_statements).to eq(true) - - connection.disable_prepared_statements - - expect(connection.scope.connection.prepared_statements).to eq(false) - end - - it 'retains the connection name' do - connection.disable_prepared_statements - - expect(connection.scope.connection_db_config.name).to eq('main') - end - - context 'with dynamic connection pool size' do - before do - connection.scope.establish_connection(connection.config.merge(pool: 7)) - end - - it 'retains the set pool size' do - 
connection.disable_prepared_statements - - expect(connection.scope.connection.prepared_statements).to eq(false) - expect(connection.scope.connection.pool.size).to eq(7) - end - end - end - - describe '#db_read_only?' do - it 'detects a read-only database' do - allow(connection.scope.connection) - .to receive(:execute) - .with('SELECT pg_is_in_recovery()') - .and_return([{ "pg_is_in_recovery" => "t" }]) - - expect(connection.db_read_only?).to be_truthy - end - - it 'detects a read-only database' do - allow(connection.scope.connection) - .to receive(:execute) - .with('SELECT pg_is_in_recovery()') - .and_return([{ "pg_is_in_recovery" => true }]) - - expect(connection.db_read_only?).to be_truthy - end - - it 'detects a read-write database' do - allow(connection.scope.connection) - .to receive(:execute) - .with('SELECT pg_is_in_recovery()') - .and_return([{ "pg_is_in_recovery" => "f" }]) - - expect(connection.db_read_only?).to be_falsey - end - - it 'detects a read-write database' do - allow(connection.scope.connection) - .to receive(:execute) - .with('SELECT pg_is_in_recovery()') - .and_return([{ "pg_is_in_recovery" => false }]) - - expect(connection.db_read_only?).to be_falsey - end - end - - describe '#db_read_write?' 
do - it 'detects a read-only database' do - allow(connection.scope.connection) - .to receive(:execute) - .with('SELECT pg_is_in_recovery()') - .and_return([{ "pg_is_in_recovery" => "t" }]) - - expect(connection.db_read_write?).to eq(false) - end - - it 'detects a read-only database' do - allow(connection.scope.connection) - .to receive(:execute) - .with('SELECT pg_is_in_recovery()') - .and_return([{ "pg_is_in_recovery" => true }]) - - expect(connection.db_read_write?).to eq(false) - end - - it 'detects a read-write database' do - allow(connection.scope.connection) - .to receive(:execute) - .with('SELECT pg_is_in_recovery()') - .and_return([{ "pg_is_in_recovery" => "f" }]) - - expect(connection.db_read_write?).to eq(true) - end - - it 'detects a read-write database' do - allow(connection.scope.connection) - .to receive(:execute) - .with('SELECT pg_is_in_recovery()') - .and_return([{ "pg_is_in_recovery" => false }]) - - expect(connection.db_read_write?).to eq(true) - end - end - - describe '#version' do - around do |example| - connection.instance_variable_set(:@version, nil) - example.run - connection.instance_variable_set(:@version, nil) - end - - context "on postgresql" do - it "extracts the version number" do - allow(connection) - .to receive(:database_version) - .and_return("PostgreSQL 9.4.4 on x86_64-apple-darwin14.3.0") - - expect(connection.version).to eq '9.4.4' - end - end - - it 'memoizes the result' do - count = ActiveRecord::QueryRecorder - .new { 2.times { connection.version } } - .count - - expect(count).to eq(1) - end - end - - describe '#postgresql_minimum_supported_version?' 
do - it 'returns false when using PostgreSQL 10' do - allow(connection).to receive(:version).and_return('10') - - expect(connection.postgresql_minimum_supported_version?).to eq(false) - end - - it 'returns false when using PostgreSQL 11' do - allow(connection).to receive(:version).and_return('11') - - expect(connection.postgresql_minimum_supported_version?).to eq(false) - end - - it 'returns true when using PostgreSQL 12' do - allow(connection).to receive(:version).and_return('12') - - expect(connection.postgresql_minimum_supported_version?).to eq(true) - end - end - - describe '#bulk_insert' do - before do - allow(connection).to receive(:connection).and_return(dummy_connection) - allow(dummy_connection).to receive(:quote_column_name, &:itself) - allow(dummy_connection).to receive(:quote, &:itself) - allow(dummy_connection).to receive(:execute) - end - - let(:dummy_connection) { double(:connection) } - - let(:rows) do - [ - { a: 1, b: 2, c: 3 }, - { c: 6, a: 4, b: 5 } - ] - end - - it 'does nothing with empty rows' do - expect(dummy_connection).not_to receive(:execute) - - connection.bulk_insert('test', []) - end - - it 'uses the ordering from the first row' do - expect(dummy_connection).to receive(:execute) do |sql| - expect(sql).to include('(1, 2, 3)') - expect(sql).to include('(4, 5, 6)') - end - - connection.bulk_insert('test', rows) - end - - it 'quotes column names' do - expect(dummy_connection).to receive(:quote_column_name).with(:a) - expect(dummy_connection).to receive(:quote_column_name).with(:b) - expect(dummy_connection).to receive(:quote_column_name).with(:c) - - connection.bulk_insert('test', rows) - end - - it 'quotes values' do - 1.upto(6) do |i| - expect(dummy_connection).to receive(:quote).with(i) - end - - connection.bulk_insert('test', rows) - end - - it 'does not quote values of a column in the disable_quote option' do - [1, 2, 4, 5].each do |i| - expect(dummy_connection).to receive(:quote).with(i) - end - - connection.bulk_insert('test', rows, 
disable_quote: :c) - end - - it 'does not quote values of columns in the disable_quote option' do - [2, 5].each do |i| - expect(dummy_connection).to receive(:quote).with(i) - end - - connection.bulk_insert('test', rows, disable_quote: [:a, :c]) - end - - it 'handles non-UTF-8 data' do - expect { connection.bulk_insert('test', [{ a: "\255" }]) }.not_to raise_error - end - - context 'when using PostgreSQL' do - it 'allows the returning of the IDs of the inserted rows' do - result = double(:result, values: [['10']]) - - expect(dummy_connection) - .to receive(:execute) - .with(/RETURNING id/) - .and_return(result) - - ids = connection - .bulk_insert('test', [{ number: 10 }], return_ids: true) - - expect(ids).to eq([10]) - end - - it 'allows setting the upsert to do nothing' do - expect(dummy_connection) - .to receive(:execute) - .with(/ON CONFLICT DO NOTHING/) - - connection - .bulk_insert('test', [{ number: 10 }], on_conflict: :do_nothing) - end - end - end - - describe '#cached_column_exists?' do - it 'only retrieves the data from the schema cache' do - queries = ActiveRecord::QueryRecorder.new do - 2.times do - expect(connection.cached_column_exists?(:projects, :id)).to be_truthy - expect(connection.cached_column_exists?(:projects, :bogus_column)).to be_falsey - end - end - - expect(queries.count).to eq(0) - end - end - - describe '#cached_table_exists?' do - it 'only retrieves the data from the schema cache' do - queries = ActiveRecord::QueryRecorder.new do - 2.times do - expect(connection.cached_table_exists?(:projects)).to be_truthy - expect(connection.cached_table_exists?(:bogus_table_name)).to be_falsey - end - end - - expect(queries.count).to eq(0) - end - - it 'returns false when database does not exist' do - expect(connection.scope).to receive(:connection) do - raise ActiveRecord::NoDatabaseError, 'broken' - end - - expect(connection.cached_table_exists?(:projects)).to be(false) - end - end - - describe '#exists?' 
do - it 'returns true if the database exists' do - expect(connection.exists?).to be(true) - end - - it "returns false if the database doesn't exist" do - expect(connection.scope.connection.schema_cache) - .to receive(:database_version) - .and_raise(ActiveRecord::NoDatabaseError) - - expect(connection.exists?).to be(false) - end - end - - describe '#system_id' do - it 'returns the PostgreSQL system identifier' do - expect(connection.system_id).to be_an_instance_of(Integer) - end - end - - describe '#get_write_location' do - it 'returns a string' do - expect(connection.get_write_location(connection.scope.connection)) - .to be_a(String) - end - - it 'returns nil if there are no results' do - expect(connection.get_write_location(double(select_all: []))).to be_nil - end - end -end diff --git a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb index cdcc862c376..9d49db1f018 100644 --- a/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb +++ b/spec/lib/gitlab/database/count/reltuples_count_strategy_spec.rb @@ -38,7 +38,8 @@ RSpec.describe Gitlab::Database::Count::ReltuplesCountStrategy do it 'returns nil counts for inherited tables' do models.each { |model| expect(model).not_to receive(:count) } - expect(subject).to eq({ Namespace => 3 }) + # 3 Namespaces as parents for each Project and 3 ProjectNamespaces(for each Project) + expect(subject).to eq({ Namespace => 6 }) end end diff --git a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb index c2028f8c238..2f261aebf02 100644 --- a/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb +++ b/spec/lib/gitlab/database/count/tablesample_count_strategy_spec.rb @@ -47,7 +47,8 @@ RSpec.describe Gitlab::Database::Count::TablesampleCountStrategy do result = subject expect(result[Project]).to eq(3) expect(result[Group]).to eq(1) - 
expect(result[Namespace]).to eq(4) + # 1-Group, 3 namespaces for each project and 3 project namespaces for each project + expect(result[Namespace]).to eq(7) end end diff --git a/spec/lib/gitlab/database/each_database_spec.rb b/spec/lib/gitlab/database/each_database_spec.rb new file mode 100644 index 00000000000..9327fc4ff78 --- /dev/null +++ b/spec/lib/gitlab/database/each_database_spec.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::EachDatabase do + describe '.each_database_connection' do + let(:expected_connections) do + Gitlab::Database.database_base_models.map { |name, model| [model.connection, name] } + end + + it 'yields each connection after connecting SharedModel' do + expected_connections.each do |connection, _| + expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(connection).and_yield + end + + yielded_connections = [] + + described_class.each_database_connection do |connection, name| + yielded_connections << [connection, name] + end + + expect(yielded_connections).to match_array(expected_connections) + end + end + + describe '.each_model_connection' do + let(:model1) { double(connection: double, table_name: 'table1') } + let(:model2) { double(connection: double, table_name: 'table2') } + + before do + allow(model1.connection).to receive_message_chain('pool.db_config.name').and_return('name1') + allow(model2.connection).to receive_message_chain('pool.db_config.name').and_return('name2') + end + + it 'yields each model after connecting SharedModel' do + expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(model1.connection).and_yield + expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(model2.connection).and_yield + + yielded_models = [] + + described_class.each_model_connection([model1, model2]) do |model, name| + yielded_models << [model, name] + end + + expect(yielded_models).to match_array([[model1, 'name1'], [model2, 
'name2']]) + end + end +end diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb new file mode 100644 index 00000000000..255efc99ff6 --- /dev/null +++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb @@ -0,0 +1,58 @@ +# frozen_string_literal: true +require 'spec_helper' + +RSpec.describe Gitlab::Database::GitlabSchema do + describe '.tables_to_schema' do + subject { described_class.tables_to_schema } + + it 'all tables have assigned a known gitlab_schema' do + is_expected.to all( + match([be_a(String), be_in([:gitlab_shared, :gitlab_main, :gitlab_ci])]) + ) + end + + # This being run across different databases indirectly also tests + # a general consistency of structure across databases + Gitlab::Database.database_base_models.each do |db_config_name, db_class| + let(:db_data_sources) { db_class.connection.data_sources } + + context "for #{db_config_name} using #{db_class}" do + it 'new data sources are added' do + missing_tables = db_data_sources.to_set - subject.keys + + expect(missing_tables).to be_empty, \ + "Missing table(s) #{missing_tables.to_a} not found in #{described_class}.tables_to_schema. " \ + "Any new tables must be added to lib/gitlab/database/gitlab_schemas.yml." + end + + it 'non-existing data sources are removed' do + extra_tables = subject.keys.to_set - db_data_sources + + expect(extra_tables).to be_empty, \ + "Extra table(s) #{extra_tables.to_a} found in #{described_class}.tables_to_schema. " \ + "Any removed or renamed tables must be removed from lib/gitlab/database/gitlab_schemas.yml." 
+ end + end + end + end + + describe '.table_schema' do + using RSpec::Parameterized::TableSyntax + + where(:name, :classification) do + 'ci_builds' | :gitlab_ci + 'my_schema.ci_builds' | :gitlab_ci + 'information_schema.columns' | :gitlab_shared + 'audit_events_part_5fc467ac26' | :gitlab_main + '_test_my_table' | :gitlab_shared + 'pg_attribute' | :gitlab_shared + 'my_other_table' | :undefined_my_other_table + end + + with_them do + subject { described_class.table_schema(name) } + + it { is_expected.to eq(classification) } + end + end +end diff --git a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb index 3e5249a3dea..eef248afdf2 100644 --- a/spec/lib/gitlab/database/load_balancing/configuration_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/configuration_spec.rb @@ -3,17 +3,12 @@ require 'spec_helper' RSpec.describe Gitlab::Database::LoadBalancing::Configuration do - let(:model) do - config = ActiveRecord::DatabaseConfigurations::HashConfig - .new('main', 'test', configuration_hash) - - double(:model, connection_db_config: config) - end + let(:configuration_hash) { {} } + let(:db_config) { ActiveRecord::DatabaseConfigurations::HashConfig.new('test', 'ci', configuration_hash) } + let(:model) { double(:model, connection_db_config: db_config) } describe '.for_model' do context 'when load balancing is not configured' do - let(:configuration_hash) { {} } - it 'uses the default settings' do config = described_class.for_model(model) @@ -105,6 +100,14 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration do expect(config.pool_size).to eq(4) end end + + it 'calls reuse_primary_connection!' do + expect_next_instance_of(described_class) do |subject| + expect(subject).to receive(:reuse_primary_connection!).and_call_original + end + + described_class.for_model(model) + end end describe '#load_balancing_enabled?' 
do @@ -180,4 +183,60 @@ RSpec.describe Gitlab::Database::LoadBalancing::Configuration do end end end + + describe '#db_config_name' do + let(:config) { described_class.new(model) } + + subject { config.db_config_name } + + it 'returns connection name as symbol' do + is_expected.to eq(:ci) + end + end + + describe '#replica_db_config' do + let(:model) { double(:model, connection_db_config: db_config, connection_specification_name: 'Ci::ApplicationRecord') } + let(:config) { described_class.for_model(model) } + + it 'returns exactly db_config' do + expect(config.replica_db_config).to eq(db_config) + end + + context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do + it 'does not change replica_db_config' do + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main') + + expect(config.replica_db_config).to eq(db_config) + end + end + end + + describe 'reuse_primary_connection!' do + let(:model) { double(:model, connection_db_config: db_config, connection_specification_name: 'Ci::ApplicationRecord') } + let(:config) { described_class.for_model(model) } + + context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_* not configured' do + it 'the primary connection uses default specification' do + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', nil) + + expect(config.primary_connection_specification_name).to eq('Ci::ApplicationRecord') + end + end + + context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do + it 'the primary connection uses main connection' do + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main') + + expect(config.primary_connection_specification_name).to eq('ActiveRecord::Base') + end + end + + context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=unknown' do + it 'raises exception' do + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'unknown') + + expect { config.reuse_primary_connection! 
}.to raise_error /Invalid value for/ + end + end + end end diff --git a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb index ba2f9485066..ee2718171c0 100644 --- a/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/connection_proxy_spec.rb @@ -3,12 +3,9 @@ require 'spec_helper' RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do - let(:proxy) do - config = Gitlab::Database::LoadBalancing::Configuration - .new(ActiveRecord::Base) - - described_class.new(Gitlab::Database::LoadBalancing::LoadBalancer.new(config)) - end + let(:config) { Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base) } + let(:load_balancer) { Gitlab::Database::LoadBalancing::LoadBalancer.new(config) } + let(:proxy) { described_class.new(load_balancer) } describe '#select' do it 'performs a read' do @@ -85,7 +82,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do describe '.insert_all!' 
do before do ActiveRecord::Schema.define do - create_table :connection_proxy_bulk_insert, force: true do |t| + create_table :_test_connection_proxy_bulk_insert, force: true do |t| t.string :name, null: true end end @@ -93,13 +90,13 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do after do ActiveRecord::Schema.define do - drop_table :connection_proxy_bulk_insert, force: true + drop_table :_test_connection_proxy_bulk_insert, force: true end end let(:model_class) do Class.new(ApplicationRecord) do - self.table_name = "connection_proxy_bulk_insert" + self.table_name = "_test_connection_proxy_bulk_insert" end end @@ -143,9 +140,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do context 'with a read query' do it 'runs the transaction and any nested queries on the replica' do - expect(proxy.load_balancer).to receive(:read) + expect(load_balancer).to receive(:read) .twice.and_yield(replica) - expect(proxy.load_balancer).not_to receive(:read_write) + expect(load_balancer).not_to receive(:read_write) expect(session).not_to receive(:write!) 
proxy.transaction { proxy.select('true') } @@ -154,8 +151,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do context 'with a write query' do it 'raises an exception' do - allow(proxy.load_balancer).to receive(:read).and_yield(replica) - allow(proxy.load_balancer).to receive(:read_write).and_yield(replica) + allow(load_balancer).to receive(:read).and_yield(replica) + allow(load_balancer).to receive(:read_write).and_yield(replica) expect do proxy.transaction { proxy.insert('something') } @@ -178,9 +175,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do context 'with a read query' do it 'runs the transaction and any nested queries on the primary and stick to it' do - expect(proxy.load_balancer).to receive(:read_write) + expect(load_balancer).to receive(:read_write) .twice.and_yield(primary) - expect(proxy.load_balancer).not_to receive(:read) + expect(load_balancer).not_to receive(:read) expect(session).to receive(:write!) proxy.transaction { proxy.select('true') } @@ -189,9 +186,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do context 'with a write query' do it 'runs the transaction and any nested queries on the primary and stick to it' do - expect(proxy.load_balancer).to receive(:read_write) + expect(load_balancer).to receive(:read_write) .twice.and_yield(primary) - expect(proxy.load_balancer).not_to receive(:read) + expect(load_balancer).not_to receive(:read) expect(session).to receive(:write!).twice proxy.transaction { proxy.insert('something') } @@ -209,7 +206,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do end it 'properly forwards keyword arguments' do - allow(proxy.load_balancer).to receive(:read_write) + allow(load_balancer).to receive(:read_write) expect(proxy).to receive(:write_using_load_balancer).and_call_original @@ -234,7 +231,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do end it 'properly forwards keyword arguments' do - 
allow(proxy.load_balancer).to receive(:read) + allow(load_balancer).to receive(:read) expect(proxy).to receive(:read_using_load_balancer).and_call_original @@ -259,7 +256,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do allow(session).to receive(:use_replicas_for_read_queries?).and_return(false) expect(connection).to receive(:foo).with('foo') - expect(proxy.load_balancer).to receive(:read).and_yield(connection) + expect(load_balancer).to receive(:read).and_yield(connection) proxy.read_using_load_balancer(:foo, 'foo') end @@ -271,7 +268,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do allow(session).to receive(:use_replicas_for_read_queries?).and_return(true) expect(connection).to receive(:foo).with('foo') - expect(proxy.load_balancer).to receive(:read).and_yield(connection) + expect(load_balancer).to receive(:read).and_yield(connection) proxy.read_using_load_balancer(:foo, 'foo') end @@ -283,7 +280,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do allow(session).to receive(:use_replicas_for_read_queries?).and_return(true) expect(connection).to receive(:foo).with('foo') - expect(proxy.load_balancer).to receive(:read).and_yield(connection) + expect(load_balancer).to receive(:read).and_yield(connection) proxy.read_using_load_balancer(:foo, 'foo') end @@ -296,7 +293,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do expect(connection).to receive(:foo).with('foo') - expect(proxy.load_balancer).to receive(:read_write) + expect(load_balancer).to receive(:read_write) .and_yield(connection) proxy.read_using_load_balancer(:foo, 'foo') @@ -314,7 +311,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::ConnectionProxy do end it 'uses but does not stick to the primary' do - expect(proxy.load_balancer).to receive(:read_write).and_yield(connection) + expect(load_balancer).to receive(:read_write).and_yield(connection) expect(connection).to receive(:foo).with('foo') expect(session).not_to 
receive(:write!) diff --git a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb index f824d4cefdf..37b83729125 100644 --- a/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/load_balancer_spec.rb @@ -4,10 +4,11 @@ require 'spec_helper' RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do let(:conflict_error) { Class.new(RuntimeError) } - let(:db_host) { ActiveRecord::Base.connection_pool.db_config.host } + let(:model) { ActiveRecord::Base } + let(:db_host) { model.connection_pool.db_config.host } let(:config) do Gitlab::Database::LoadBalancing::Configuration - .new(ActiveRecord::Base, [db_host, db_host]) + .new(model, [db_host, db_host]) end let(:lb) { described_class.new(config) } @@ -88,6 +89,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do host = double(:host) allow(lb).to receive(:host).and_return(host) + allow(Rails.application.executor).to receive(:active?).and_return(true) allow(host).to receive(:query_cache_enabled).and_return(false) allow(host).to receive(:connection).and_return(connection) @@ -96,6 +98,20 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do lb.read { 10 } end + it 'does not enable query cache when outside Rails executor context' do + connection = double(:connection) + host = double(:host) + + allow(lb).to receive(:host).and_return(host) + allow(Rails.application.executor).to receive(:active?).and_return(false) + allow(host).to receive(:query_cache_enabled).and_return(false) + allow(host).to receive(:connection).and_return(connection) + + expect(host).not_to receive(:enable_query_cache!) 
+ + lb.read { 10 } + end + it 'marks hosts that are offline' do allow(lb).to receive(:connection_error?).and_return(true) @@ -216,7 +232,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do it 'does not create conflicts with other load balancers when caching hosts' do ci_config = Gitlab::Database::LoadBalancing::Configuration - .new(Ci::CiDatabaseRecord, [db_host, db_host]) + .new(Ci::ApplicationRecord, [db_host, db_host]) lb1 = described_class.new(config) lb2 = described_class.new(ci_config) @@ -459,4 +475,84 @@ RSpec.describe Gitlab::Database::LoadBalancing::LoadBalancer, :request_store do lb.disconnect!(timeout: 30) end end + + describe '#get_write_location' do + it 'returns a string' do + expect(lb.send(:get_write_location, lb.pool.connection)) + .to be_a(String) + end + + it 'returns nil if there are no results' do + expect(lb.send(:get_write_location, double(select_all: []))).to be_nil + end + end + + describe 'primary connection re-use', :reestablished_active_record_base do + let(:model) { Ci::ApplicationRecord } + + around do |example| + if Gitlab::Database.has_config?(:ci) + example.run + else + # fake additional Database + model.establish_connection( + ActiveRecord::DatabaseConfigurations::HashConfig.new(Rails.env, 'ci', ActiveRecord::Base.connection_db_config.configuration_hash) + ) + + example.run + + # Cleanup connection_specification_name for Ci::ApplicationRecord + model.remove_connection + end + end + + describe '#read' do + it 'returns ci replica connection' do + expect { |b| lb.read(&b) }.to yield_with_args do |args| + expect(args.pool.db_config.name).to eq('ci_replica') + end + end + + context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do + it 'returns ci replica connection' do + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main') + + expect { |b| lb.read(&b) }.to yield_with_args do |args| + expect(args.pool.db_config.name).to eq('ci_replica') + end + end + end + end + + describe '#read_write' do + 
it 'returns Ci::ApplicationRecord connection' do + expect { |b| lb.read_write(&b) }.to yield_with_args do |args| + expect(args.pool.db_config.name).to eq('ci') + end + end + + context 'when GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci=main' do + it 'returns ActiveRecord::Base connection' do + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', 'main') + + expect { |b| lb.read_write(&b) }.to yield_with_args do |args| + expect(args.pool.db_config.name).to eq('main') + end + end + end + end + end + + describe '#wal_diff' do + it 'returns the diff between two write locations' do + loc1 = lb.send(:get_write_location, lb.pool.connection) + + create(:user) # This ensures we get a new WAL location + + loc2 = lb.send(:get_write_location, lb.pool.connection) + diff = lb.wal_diff(loc2, loc1) + + expect(diff).to be_positive + end + end end diff --git a/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb b/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb index 45d81808971..02c9499bedb 100644 --- a/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/primary_host_spec.rb @@ -51,7 +51,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::PrimaryHost do end describe '#offline!' 
do - it 'does nothing' do + it 'logs the event but does nothing else' do + expect(Gitlab::Database::LoadBalancing::Logger).to receive(:warn) + .with(hash_including(event: :host_offline)) + .and_call_original + expect(host.offline!).to be_nil end end diff --git a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb index af7e2a4b167..b768d4ecea3 100644 --- a/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/rack_middleware_spec.rb @@ -6,12 +6,12 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do let(:app) { double(:app) } let(:middleware) { described_class.new(app) } let(:warden_user) { double(:warden, user: double(:user, id: 42)) } - let(:single_sticking_object) { Set.new([[ActiveRecord::Base, :user, 42]]) } + let(:single_sticking_object) { Set.new([[ActiveRecord::Base.sticking, :user, 42]]) } let(:multiple_sticking_objects) do Set.new([ - [ActiveRecord::Base, :user, 42], - [ActiveRecord::Base, :runner, '123456789'], - [ActiveRecord::Base, :runner, '1234'] + [ActiveRecord::Base.sticking, :user, 42], + [ActiveRecord::Base.sticking, :runner, '123456789'], + [ActiveRecord::Base.sticking, :runner, '1234'] ]) end @@ -162,7 +162,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do it 'returns the warden user if present' do env = { 'warden' => warden_user } ids = Gitlab::Database::LoadBalancing.base_models.map do |model| - [model, :user, 42] + [model.sticking, :user, 42] end expect(middleware.sticking_namespaces(env)).to eq(ids) @@ -181,9 +181,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::RackMiddleware, :redis do env = { described_class::STICK_OBJECT => multiple_sticking_objects } expect(middleware.sticking_namespaces(env)).to eq([ - [ActiveRecord::Base, :user, 42], - [ActiveRecord::Base, :runner, '123456789'], - [ActiveRecord::Base, :runner, '1234'] + 
[ActiveRecord::Base.sticking, :user, 42], + [ActiveRecord::Base.sticking, :runner, '123456789'], + [ActiveRecord::Base.sticking, :runner, '1234'] ]) end end diff --git a/spec/lib/gitlab/database/load_balancing/setup_spec.rb b/spec/lib/gitlab/database/load_balancing/setup_spec.rb index 01646bc76ef..953d83d3b48 100644 --- a/spec/lib/gitlab/database/load_balancing/setup_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/setup_spec.rb @@ -7,19 +7,20 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do it 'sets up the load balancer' do setup = described_class.new(ActiveRecord::Base) - expect(setup).to receive(:disable_prepared_statements) - expect(setup).to receive(:setup_load_balancer) + expect(setup).to receive(:configure_connection) + expect(setup).to receive(:setup_connection_proxy) expect(setup).to receive(:setup_service_discovery) + expect(setup).to receive(:setup_feature_flag_to_model_load_balancing) setup.setup end end - describe '#disable_prepared_statements' do - it 'disables prepared statements and reconnects to the database' do + describe '#configure_connection' do + it 'configures pool, prepared statements and reconnects to the database' do config = double( :config, - configuration_hash: { host: 'localhost' }, + configuration_hash: { host: 'localhost', pool: 2, prepared_statements: true }, env_name: 'test', name: 'main' ) @@ -27,7 +28,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do expect(ActiveRecord::DatabaseConfigurations::HashConfig) .to receive(:new) - .with('test', 'main', { host: 'localhost', prepared_statements: false }) + .with('test', 'main', { + host: 'localhost', + prepared_statements: false, + pool: Gitlab::Database.default_pool_size + }) .and_call_original # HashConfig doesn't implement its own #==, so we can't directly compare @@ -36,11 +41,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do .to receive(:establish_connection) .with(an_instance_of(ActiveRecord::DatabaseConfigurations::HashConfig)) - 
described_class.new(model).disable_prepared_statements + described_class.new(model).configure_connection end end - describe '#setup_load_balancer' do + describe '#setup_connection_proxy' do it 'sets up the load balancer' do model = Class.new(ActiveRecord::Base) setup = described_class.new(model) @@ -54,9 +59,9 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do .with(setup.configuration) .and_return(lb) - setup.setup_load_balancer + setup.setup_connection_proxy - expect(model.connection.load_balancer).to eq(lb) + expect(model.load_balancer).to eq(lb) expect(model.sticking) .to be_an_instance_of(Gitlab::Database::LoadBalancing::Sticking) end @@ -77,7 +82,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do model = ActiveRecord::Base setup = described_class.new(model) sv = instance_spy(Gitlab::Database::LoadBalancing::ServiceDiscovery) - lb = model.connection.load_balancer allow(setup.configuration) .to receive(:service_discovery_enabled?) @@ -85,7 +89,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do allow(Gitlab::Database::LoadBalancing::ServiceDiscovery) .to receive(:new) - .with(lb, setup.configuration.service_discovery) + .with(setup.load_balancer, setup.configuration.service_discovery) .and_return(sv) expect(sv).to receive(:perform_service_discovery) @@ -98,7 +102,6 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do model = ActiveRecord::Base setup = described_class.new(model, start_service_discovery: true) sv = instance_spy(Gitlab::Database::LoadBalancing::ServiceDiscovery) - lb = model.connection.load_balancer allow(setup.configuration) .to receive(:service_discovery_enabled?) 
@@ -106,7 +109,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do allow(Gitlab::Database::LoadBalancing::ServiceDiscovery) .to receive(:new) - .with(lb, setup.configuration.service_discovery) + .with(setup.load_balancer, setup.configuration.service_discovery) .and_return(sv) expect(sv).to receive(:perform_service_discovery) @@ -116,4 +119,181 @@ RSpec.describe Gitlab::Database::LoadBalancing::Setup do end end end + + describe '#setup_feature_flag_to_model_load_balancing', :reestablished_active_record_base do + using RSpec::Parameterized::TableSyntax + + where do + { + "with model LB enabled it picks a dedicated CI connection" => { + env_GITLAB_USE_MODEL_LOAD_BALANCING: 'true', + env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, + request_store_active: false, + ff_use_model_load_balancing: nil, + expectations: { + main: { read: 'main_replica', write: 'main' }, + ci: { read: 'ci_replica', write: 'ci' } + } + }, + "with model LB enabled and re-use of primary connection it uses CI connection for reads" => { + env_GITLAB_USE_MODEL_LOAD_BALANCING: 'true', + env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main', + request_store_active: false, + ff_use_model_load_balancing: nil, + expectations: { + main: { read: 'main_replica', write: 'main' }, + ci: { read: 'ci_replica', write: 'main' } + } + }, + "with model LB disabled it fallbacks to use main" => { + env_GITLAB_USE_MODEL_LOAD_BALANCING: 'false', + env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, + request_store_active: false, + ff_use_model_load_balancing: nil, + expectations: { + main: { read: 'main_replica', write: 'main' }, + ci: { read: 'main_replica', write: 'main' } + } + }, + "with model LB disabled, but re-use configured it fallbacks to use main" => { + env_GITLAB_USE_MODEL_LOAD_BALANCING: 'false', + env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main', + request_store_active: false, + ff_use_model_load_balancing: nil, + expectations: { + main: { read: 'main_replica', write: 'main' }, + ci: { read: 
'main_replica', write: 'main' } + } + }, + "with FF disabled without RequestStore it uses main" => { + env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, + env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, + request_store_active: false, + ff_use_model_load_balancing: false, + expectations: { + main: { read: 'main_replica', write: 'main' }, + ci: { read: 'main_replica', write: 'main' } + } + }, + "with FF enabled without RequestStore sticking of FF does not work, so it fallbacks to use main" => { + env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, + env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, + request_store_active: false, + ff_use_model_load_balancing: true, + expectations: { + main: { read: 'main_replica', write: 'main' }, + ci: { read: 'main_replica', write: 'main' } + } + }, + "with FF disabled with RequestStore it uses main" => { + env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, + env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, + request_store_active: true, + ff_use_model_load_balancing: false, + expectations: { + main: { read: 'main_replica', write: 'main' }, + ci: { read: 'main_replica', write: 'main' } + } + }, + "with FF enabled with RequestStore it sticks FF and uses CI connection" => { + env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, + env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: nil, + request_store_active: true, + ff_use_model_load_balancing: true, + expectations: { + main: { read: 'main_replica', write: 'main' }, + ci: { read: 'ci_replica', write: 'ci' } + } + }, + "with re-use and FF enabled with RequestStore it sticks FF and uses CI connection for reads" => { + env_GITLAB_USE_MODEL_LOAD_BALANCING: nil, + env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci: 'main', + request_store_active: true, + ff_use_model_load_balancing: true, + expectations: { + main: { read: 'main_replica', write: 'main' }, + ci: { read: 'ci_replica', write: 'main' } + } + } + } + end + + with_them do + let(:ci_class) do + Class.new(ActiveRecord::Base) do + def self.name + 'Ci::ApplicationRecordTemporary' + end 
+ + establish_connection ActiveRecord::DatabaseConfigurations::HashConfig.new( + Rails.env, + 'ci', + ActiveRecord::Base.connection_db_config.configuration_hash + ) + end + end + + let(:models) do + { + main: ActiveRecord::Base, + ci: ci_class + } + end + + around do |example| + if request_store_active + Gitlab::WithRequestStore.with_request_store do + RequestStore.clear! + + example.run + end + else + example.run + end + end + + before do + # Rewrite `class_attribute` to use rspec mocking and prevent modifying the objects + allow_next_instance_of(described_class) do |setup| + allow(setup).to receive(:configure_connection) + + allow(setup).to receive(:setup_class_attribute) do |attribute, value| + allow(setup.model).to receive(attribute) { value } + end + end + + stub_env('GITLAB_USE_MODEL_LOAD_BALANCING', env_GITLAB_USE_MODEL_LOAD_BALANCING) + stub_env('GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci', env_GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci) + stub_feature_flags(use_model_load_balancing: ff_use_model_load_balancing) + + # Make load balancer to force init with a dedicated replicas connections + models.each do |_, model| + described_class.new(model).tap do |subject| + subject.configuration.hosts = [subject.configuration.replica_db_config.host] + subject.setup + end + end + end + + it 'results match expectations' do + result = models.transform_values do |model| + load_balancer = model.connection.instance_variable_get(:@load_balancer) + + { + read: load_balancer.read { |connection| connection.pool.db_config.name }, + write: load_balancer.read_write { |connection| connection.pool.db_config.name } + } + end + + expect(result).to eq(expectations) + end + + it 'does return load_balancer assigned to a given connection' do + models.each do |name, model| + expect(model.load_balancer.name).to eq(name) + expect(model.sticking.instance_variable_get(:@load_balancer)).to eq(model.load_balancer) + end + end + end + end end diff --git 
a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb index 08dd6a0a788..9acf80e684f 100644 --- a/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sidekiq_client_middleware_spec.rb @@ -181,11 +181,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqClientMiddleware do end context 'when worker data consistency is :delayed' do - include_examples 'mark data consistency location', :delayed + include_examples 'mark data consistency location', :delayed end context 'when worker data consistency is :sticky' do - include_examples 'mark data consistency location', :sticky + include_examples 'mark data consistency location', :sticky end end end diff --git a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb index 06efdcd8f99..de2ad662d16 100644 --- a/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sidekiq_server_middleware_spec.rb @@ -64,7 +64,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ let(:wal_locations) { { Gitlab::Database::MAIN_DATABASE_NAME.to_sym => location } } it 'does not stick to the primary', :aggregate_failures do - expect(ActiveRecord::Base.connection.load_balancer) + expect(ActiveRecord::Base.load_balancer) .to receive(:select_up_to_date_host) .with(location) .and_return(true) @@ -107,7 +107,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'dedup_wal_locations' => wal_locations } } before do - allow(ActiveRecord::Base.connection.load_balancer) + allow(ActiveRecord::Base.load_balancer) .to receive(:select_up_to_date_host) .with(wal_locations[:main]) .and_return(true) @@ -120,7 
+120,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::SidekiqServerMiddleware, :clean_ let(:job) { { 'job_id' => 'a180b47c-3fd6-41b8-81e9-34da61c3400e', 'database_write_location' => '0/D525E3A8' } } before do - allow(ActiveRecord::Base.connection.load_balancer) + allow(ActiveRecord::Base.load_balancer) .to receive(:select_up_to_date_host) .with('0/D525E3A8') .and_return(true) diff --git a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb index 8ceda52ee85..d88554614cf 100644 --- a/spec/lib/gitlab/database/load_balancing/sticking_spec.rb +++ b/spec/lib/gitlab/database/load_balancing/sticking_spec.rb @@ -4,7 +4,7 @@ require 'spec_helper' RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do let(:sticking) do - described_class.new(ActiveRecord::Base.connection.load_balancer) + described_class.new(ActiveRecord::Base.load_balancer) end after do @@ -22,7 +22,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do sticking.stick_or_unstick_request(env, :user, 42) expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a) - .to eq([[ActiveRecord::Base, :user, 42]]) + .to eq([[sticking, :user, 42]]) end it 'sticks or unsticks multiple objects and updates the Rack environment' do @@ -42,8 +42,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do sticking.stick_or_unstick_request(env, :runner, '123456789') expect(env[Gitlab::Database::LoadBalancing::RackMiddleware::STICK_OBJECT].to_a).to eq([ - [ActiveRecord::Base, :user, 42], - [ActiveRecord::Base, :runner, '123456789'] + [sticking, :user, 42], + [sticking, :runner, '123456789'] ]) end end @@ -73,7 +73,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do end describe '#all_caught_up?' 
do - let(:lb) { ActiveRecord::Base.connection.load_balancer } + let(:lb) { ActiveRecord::Base.load_balancer } let(:last_write_location) { 'foo' } before do @@ -137,7 +137,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do end describe '#unstick_or_continue_sticking' do - let(:lb) { ActiveRecord::Base.connection.load_balancer } + let(:lb) { ActiveRecord::Base.load_balancer } it 'simply returns if no write location could be found' do allow(sticking) @@ -182,13 +182,13 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do RSpec.shared_examples 'sticking' do before do - allow(ActiveRecord::Base.connection.load_balancer) + allow(ActiveRecord::Base.load_balancer) .to receive(:primary_write_location) .and_return('foo') end it 'sticks an entity to the primary', :aggregate_failures do - allow(ActiveRecord::Base.connection.load_balancer) + allow(ActiveRecord::Base.load_balancer) .to receive(:primary_only?) .and_return(false) @@ -227,11 +227,11 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do describe '#mark_primary_write_location' do it 'updates the write location with the load balancer' do - allow(ActiveRecord::Base.connection.load_balancer) + allow(ActiveRecord::Base.load_balancer) .to receive(:primary_write_location) .and_return('foo') - allow(ActiveRecord::Base.connection.load_balancer) + allow(ActiveRecord::Base.load_balancer) .to receive(:primary_only?) 
.and_return(false) @@ -291,7 +291,7 @@ RSpec.describe Gitlab::Database::LoadBalancing::Sticking, :redis do end describe '#select_caught_up_replicas' do - let(:lb) { ActiveRecord::Base.connection.load_balancer } + let(:lb) { ActiveRecord::Base.load_balancer } context 'with no write location' do before do diff --git a/spec/lib/gitlab/database/load_balancing_spec.rb b/spec/lib/gitlab/database/load_balancing_spec.rb index bf5314e2c34..65ffe539910 100644 --- a/spec/lib/gitlab/database/load_balancing_spec.rb +++ b/spec/lib/gitlab/database/load_balancing_spec.rb @@ -10,7 +10,7 @@ RSpec.describe Gitlab::Database::LoadBalancing do expect(models).to include(ActiveRecord::Base) if Gitlab::Database.has_config?(:ci) - expect(models).to include(Ci::CiDatabaseRecord) + expect(models).to include(Ci::ApplicationRecord) end end @@ -76,7 +76,7 @@ RSpec.describe Gitlab::Database::LoadBalancing do context 'when a read connection is used' do it 'returns :replica' do - proxy.load_balancer.read do |connection| + load_balancer.read do |connection| expect(described_class.db_role_for_connection(connection)).to eq(:replica) end end @@ -84,7 +84,7 @@ RSpec.describe Gitlab::Database::LoadBalancing do context 'when a read_write connection is used' do it 'returns :primary' do - proxy.load_balancer.read_write do |connection| + load_balancer.read_write do |connection| expect(described_class.db_role_for_connection(connection)).to eq(:primary) end end @@ -105,7 +105,7 @@ RSpec.describe Gitlab::Database::LoadBalancing do describe 'LoadBalancing integration tests', :database_replica, :delete do before(:all) do ActiveRecord::Schema.define do - create_table :load_balancing_test, force: true do |t| + create_table :_test_load_balancing_test, force: true do |t| t.string :name, null: true end end @@ -113,13 +113,13 @@ RSpec.describe Gitlab::Database::LoadBalancing do after(:all) do ActiveRecord::Schema.define do - drop_table :load_balancing_test, force: true + drop_table :_test_load_balancing_test, force: 
true end end let(:model) do Class.new(ApplicationRecord) do - self.table_name = "load_balancing_test" + self.table_name = "_test_load_balancing_test" end end @@ -443,7 +443,7 @@ RSpec.describe Gitlab::Database::LoadBalancing do elsif payload[:name] == 'SQL' # Custom query true else - keywords = %w[load_balancing_test] + keywords = %w[_test_load_balancing_test] keywords += %w[begin commit] if include_transaction keywords.any? { |keyword| payload[:sql].downcase.include?(keyword) } end diff --git a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb index 54b3ad22faf..f1dbfbbff18 100644 --- a/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb @@ -9,18 +9,18 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do let(:model) do Class.new(ApplicationRecord) do - self.table_name = 'loose_fk_test_table' + self.table_name = '_test_loose_fk_test_table' end end before(:all) do - migration.create_table :loose_fk_test_table do |t| + migration.create_table :_test_loose_fk_test_table do |t| t.timestamps end end after(:all) do - migration.drop_table :loose_fk_test_table + migration.drop_table :_test_loose_fk_test_table end before do @@ -37,7 +37,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do context 'when the record deletion tracker trigger is installed' do before do - migration.track_record_deletions(:loose_fk_test_table) + migration.track_record_deletions(:_test_loose_fk_test_table) end it 'stores the record deletion' do @@ -50,7 +50,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::LooseForeignKeyHelpers do deleted_record = LooseForeignKeys::DeletedRecord.all.first expect(deleted_record.primary_key_value).to eq(record_to_be_deleted.id) - expect(deleted_record.fully_qualified_table_name).to 
eq('public.loose_fk_test_table') + expect(deleted_record.fully_qualified_table_name).to eq('public._test_loose_fk_test_table') expect(deleted_record.partition).to eq(1) end diff --git a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb index 854e97ef897..acf775b3538 100644 --- a/spec/lib/gitlab/database/migration_helpers/v2_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers/v2_spec.rb @@ -20,7 +20,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do let(:model) { Class.new(ActiveRecord::Base) } before do - model.table_name = :test_table + model.table_name = :_test_table end context 'when called inside a transaction block' do @@ -30,19 +30,19 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do it 'raises an error' do expect do - migration.public_send(operation, :test_table, :original, :renamed) + migration.public_send(operation, :_test_table, :original, :renamed) end.to raise_error("#{operation} can not be run inside a transaction") end end context 'when the existing column has a default value' do before do - migration.change_column_default :test_table, existing_column, 'default value' + migration.change_column_default :_test_table, existing_column, 'default value' end it 'raises an error' do expect do - migration.public_send(operation, :test_table, :original, :renamed) + migration.public_send(operation, :_test_table, :original, :renamed) end.to raise_error("#{operation} does not currently support columns with default values") end end @@ -51,18 +51,18 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do context 'when the batch column does not exist' do it 'raises an error' do expect do - migration.public_send(operation, :test_table, :original, :renamed, batch_column_name: :missing) - end.to raise_error('Column missing does not exist on test_table') + migration.public_send(operation, :_test_table, :original, :renamed, batch_column_name: :missing) + end.to raise_error('Column 
missing does not exist on _test_table') end end context 'when the batch column does exist' do it 'passes it when creating the column' do expect(migration).to receive(:create_column_from) - .with(:test_table, existing_column, added_column, type: nil, batch_column_name: :status) + .with(:_test_table, existing_column, added_column, type: nil, batch_column_name: :status) .and_call_original - migration.public_send(operation, :test_table, :original, :renamed, batch_column_name: :status) + migration.public_send(operation, :_test_table, :original, :renamed, batch_column_name: :status) end end end @@ -71,17 +71,17 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do existing_record_1 = model.create!(status: 0, existing_column => 'existing') existing_record_2 = model.create!(status: 0, existing_column => nil) - migration.send(operation, :test_table, :original, :renamed) + migration.send(operation, :_test_table, :original, :renamed) model.reset_column_information - expect(migration.column_exists?(:test_table, added_column)).to eq(true) + expect(migration.column_exists?(:_test_table, added_column)).to eq(true) expect(existing_record_1.reload).to have_attributes(status: 0, original: 'existing', renamed: 'existing') expect(existing_record_2.reload).to have_attributes(status: 0, original: nil, renamed: nil) end it 'installs triggers to sync new data' do - migration.public_send(operation, :test_table, :original, :renamed) + migration.public_send(operation, :_test_table, :original, :renamed) model.reset_column_information new_record_1 = model.create!(status: 1, original: 'first') @@ -102,7 +102,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do before do allow(migration).to receive(:transaction_open?).and_return(false) - migration.create_table :test_table do |t| + migration.create_table :_test_table do |t| t.integer :status, null: false t.text :original t.text :other_column @@ -118,8 +118,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do context 'when 
the column to rename does not exist' do it 'raises an error' do expect do - migration.rename_column_concurrently :test_table, :missing_column, :renamed - end.to raise_error('Column missing_column does not exist on test_table') + migration.rename_column_concurrently :_test_table, :missing_column, :renamed + end.to raise_error('Column missing_column does not exist on _test_table') end end end @@ -128,7 +128,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do before do allow(migration).to receive(:transaction_open?).and_return(false) - migration.create_table :test_table do |t| + migration.create_table :_test_table do |t| t.integer :status, null: false t.text :other_column t.text :renamed @@ -144,8 +144,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do context 'when the renamed column does not exist' do it 'raises an error' do expect do - migration.undo_cleanup_concurrent_column_rename :test_table, :original, :missing_column - end.to raise_error('Column missing_column does not exist on test_table') + migration.undo_cleanup_concurrent_column_rename :_test_table, :original, :missing_column + end.to raise_error('Column missing_column does not exist on _test_table') end end end @@ -156,25 +156,25 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do before do allow(migration).to receive(:transaction_open?).and_return(false) - migration.create_table :test_table do |t| + migration.create_table :_test_table do |t| t.integer :status, null: false t.text :original t.text :other_column end - migration.rename_column_concurrently :test_table, :original, :renamed + migration.rename_column_concurrently :_test_table, :original, :renamed end context 'when the helper is called repeatedly' do before do - migration.public_send(operation, :test_table, :original, :renamed) + migration.public_send(operation, :_test_table, :original, :renamed) end it 'does not make repeated attempts to cleanup' do expect(migration).not_to receive(:remove_column) expect do - 
migration.public_send(operation, :test_table, :original, :renamed) + migration.public_send(operation, :_test_table, :original, :renamed) end.not_to raise_error end end @@ -182,26 +182,26 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do context 'when the renamed column exists' do let(:triggers) do [ - ['trigger_7cc71f92fd63', 'function_for_trigger_7cc71f92fd63', before: 'insert'], - ['trigger_f1a1f619636a', 'function_for_trigger_f1a1f619636a', before: 'update'], - ['trigger_769a49938884', 'function_for_trigger_769a49938884', before: 'update'] + ['trigger_020dbcb8cdd0', 'function_for_trigger_020dbcb8cdd0', before: 'insert'], + ['trigger_6edaca641d03', 'function_for_trigger_6edaca641d03', before: 'update'], + ['trigger_a3fb9f3add34', 'function_for_trigger_a3fb9f3add34', before: 'update'] ] end it 'removes the sync triggers and renamed columns' do triggers.each do |(trigger_name, function_name, event)| expect_function_to_exist(function_name) - expect_valid_function_trigger(:test_table, trigger_name, function_name, event) + expect_valid_function_trigger(:_test_table, trigger_name, function_name, event) end - expect(migration.column_exists?(:test_table, added_column)).to eq(true) + expect(migration.column_exists?(:_test_table, added_column)).to eq(true) - migration.public_send(operation, :test_table, :original, :renamed) + migration.public_send(operation, :_test_table, :original, :renamed) - expect(migration.column_exists?(:test_table, added_column)).to eq(false) + expect(migration.column_exists?(:_test_table, added_column)).to eq(false) triggers.each do |(trigger_name, function_name, _)| - expect_trigger_not_to_exist(:test_table, trigger_name) + expect_trigger_not_to_exist(:_test_table, trigger_name) expect_function_not_to_exist(function_name) end end @@ -223,7 +223,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do end describe '#create_table' do - let(:table_name) { :test_table } + let(:table_name) { :_test_table } let(:column_attributes) do [ { 
name: 'id', sql_type: 'bigint', null: false, default: nil }, @@ -245,7 +245,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::V2 do end expect_table_columns_to_match(column_attributes, table_name) - expect_check_constraint(table_name, 'check_cda6f69506', 'char_length(name) <= 100') + expect_check_constraint(table_name, 'check_e9982cf9da', 'char_length(name) <= 100') end end end diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb index d89af1521a2..ea755f5a368 100644 --- a/spec/lib/gitlab/database/migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migration_helpers_spec.rb @@ -31,16 +31,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do end describe '#add_timestamps_with_timezone' do - let(:in_transaction) { false } - - before do - allow(model).to receive(:transaction_open?).and_return(in_transaction) - allow(model).to receive(:disable_statement_timeout) - end - it 'adds "created_at" and "updated_at" fields with the "datetime_with_timezone" data type' do Gitlab::Database::MigrationHelpers::DEFAULT_TIMESTAMP_COLUMNS.each do |column_name| - expect(model).to receive(:add_column).with(:foo, column_name, :datetime_with_timezone, { null: false }) + expect(model).to receive(:add_column) + .with(:foo, column_name, :datetime_with_timezone, { default: nil, null: false }) end model.add_timestamps_with_timezone(:foo) @@ -48,7 +42,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do it 'can disable the NOT NULL constraint' do Gitlab::Database::MigrationHelpers::DEFAULT_TIMESTAMP_COLUMNS.each do |column_name| - expect(model).to receive(:add_column).with(:foo, column_name, :datetime_with_timezone, { null: true }) + expect(model).to receive(:add_column) + .with(:foo, column_name, :datetime_with_timezone, { default: nil, null: true }) end model.add_timestamps_with_timezone(:foo, null: true) @@ -64,9 +59,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers do it 'can add choice of acceptable 
columns' do expect(model).to receive(:add_column).with(:foo, :created_at, :datetime_with_timezone, anything) expect(model).to receive(:add_column).with(:foo, :deleted_at, :datetime_with_timezone, anything) + expect(model).to receive(:add_column).with(:foo, :processed_at, :datetime_with_timezone, anything) expect(model).not_to receive(:add_column).with(:foo, :updated_at, :datetime_with_timezone, anything) - model.add_timestamps_with_timezone(:foo, columns: [:created_at, :deleted_at]) + model.add_timestamps_with_timezone(:foo, columns: [:created_at, :deleted_at, :processed_at]) end it 'cannot add unacceptable column names' do @@ -74,29 +70,6 @@ RSpec.describe Gitlab::Database::MigrationHelpers do model.add_timestamps_with_timezone(:foo, columns: [:bar]) end.to raise_error %r/Illegal timestamp column name/ end - - context 'in a transaction' do - let(:in_transaction) { true } - - before do - allow(model).to receive(:add_column).with(any_args).and_call_original - allow(model).to receive(:add_column) - .with(:foo, anything, :datetime_with_timezone, anything) - .and_return(nil) - end - - it 'cannot add a default value' do - expect do - model.add_timestamps_with_timezone(:foo, default: :i_cause_an_error) - end.to raise_error %r/add_timestamps_with_timezone/ - end - - it 'can add columns without defaults' do - expect do - model.add_timestamps_with_timezone(:foo) - end.not_to raise_error - end - end end describe '#create_table_with_constraints' do @@ -271,12 +244,92 @@ RSpec.describe Gitlab::Database::MigrationHelpers do model.add_concurrent_index(:users, :foo, unique: true) end - it 'does nothing if the index exists already' do - expect(model).to receive(:index_exists?) 
- .with(:users, :foo, { algorithm: :concurrently, unique: true }).and_return(true) - expect(model).not_to receive(:add_index) + context 'when the index exists and is valid' do + before do + model.add_index :users, :id, unique: true + end - model.add_concurrent_index(:users, :foo, unique: true) + it 'does leaves the existing index' do + expect(model).to receive(:index_exists?) + .with(:users, :id, { algorithm: :concurrently, unique: true }).and_call_original + + expect(model).not_to receive(:remove_index) + expect(model).not_to receive(:add_index) + + model.add_concurrent_index(:users, :id, unique: true) + end + end + + context 'when an invalid copy of the index exists' do + before do + model.add_index :users, :id, unique: true, name: index_name + + model.connection.execute(<<~SQL) + UPDATE pg_index + SET indisvalid = false + WHERE indexrelid = '#{index_name}'::regclass + SQL + end + + context 'when the default name is used' do + let(:index_name) { model.index_name(:users, :id) } + + it 'drops and recreates the index' do + expect(model).to receive(:index_exists?) + .with(:users, :id, { algorithm: :concurrently, unique: true }).and_call_original + expect(model).to receive(:index_invalid?).with(index_name, schema: nil).and_call_original + + expect(model).to receive(:remove_concurrent_index_by_name).with(:users, index_name) + + expect(model).to receive(:add_index) + .with(:users, :id, { algorithm: :concurrently, unique: true }) + + model.add_concurrent_index(:users, :id, unique: true) + end + end + + context 'when a custom name is used' do + let(:index_name) { 'my_test_index' } + + it 'drops and recreates the index' do + expect(model).to receive(:index_exists?) 
+ .with(:users, :id, { algorithm: :concurrently, unique: true, name: index_name }).and_call_original + expect(model).to receive(:index_invalid?).with(index_name, schema: nil).and_call_original + + expect(model).to receive(:remove_concurrent_index_by_name).with(:users, index_name) + + expect(model).to receive(:add_index) + .with(:users, :id, { algorithm: :concurrently, unique: true, name: index_name }) + + model.add_concurrent_index(:users, :id, unique: true, name: index_name) + end + end + + context 'when a qualified table name is used' do + let(:other_schema) { 'foo_schema' } + let(:index_name) { 'my_test_index' } + let(:table_name) { "#{other_schema}.users" } + + before do + model.connection.execute(<<~SQL) + CREATE SCHEMA #{other_schema}; + ALTER TABLE users SET SCHEMA #{other_schema}; + SQL + end + + it 'drops and recreates the index' do + expect(model).to receive(:index_exists?) + .with(table_name, :id, { algorithm: :concurrently, unique: true, name: index_name }).and_call_original + expect(model).to receive(:index_invalid?).with(index_name, schema: other_schema).and_call_original + + expect(model).to receive(:remove_concurrent_index_by_name).with(table_name, index_name) + + expect(model).to receive(:add_index) + .with(table_name, :id, { algorithm: :concurrently, unique: true, name: index_name }) + + model.add_concurrent_index(table_name, :id, unique: true, name: index_name) + end + end end it 'unprepares the async index creation' do diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb index 1a7116e75e5..e42a6c970ea 100644 --- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb +++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb @@ -583,12 +583,33 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do end describe '#finalized_background_migration' do - include_context 'background 
migration job class' + let(:job_coordinator) { Gitlab::BackgroundMigration::JobCoordinator.new(:main, BackgroundMigrationWorker) } + + let!(:job_class_name) { 'TestJob' } + let!(:job_class) { Class.new } + let!(:job_perform_method) do + ->(*arguments) do + Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( + # Value is 'TestJob' defined by :job_class_name in the let! above. + # Scoping prohibits us from directly referencing job_class_name. + RSpec.current_example.example_group_instance.job_class_name, + arguments + ) + end + end let!(:tracked_pending_job) { create(:background_migration_job, class_name: job_class_name, status: :pending, arguments: [1]) } let!(:tracked_successful_job) { create(:background_migration_job, class_name: job_class_name, status: :succeeded, arguments: [2]) } before do + job_class.define_method(:perform, job_perform_method) + + allow(Gitlab::BackgroundMigration).to receive(:coordinator_for_database) + .with(:main).and_return(job_coordinator) + + expect(job_coordinator).to receive(:migration_class_for) + .with(job_class_name).at_least(:once) { job_class } + Sidekiq::Testing.disable! 
do BackgroundMigrationWorker.perform_async(job_class_name, [1, 2]) BackgroundMigrationWorker.perform_async(job_class_name, [3, 4]) diff --git a/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb b/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb new file mode 100644 index 00000000000..e65f89747c4 --- /dev/null +++ b/spec/lib/gitlab/database/migrations/observers/transaction_duration_spec.rb @@ -0,0 +1,106 @@ +# frozen_string_literal: true +require 'spec_helper' + +RSpec.describe Gitlab::Database::Migrations::Observers::TransactionDuration do + subject(:transaction_duration_observer) { described_class.new(observation, directory_path) } + + let(:observation) { Gitlab::Database::Migrations::Observation.new(migration_version, migration_name) } + let(:directory_path) { Dir.mktmpdir } + let(:log_file) { "#{directory_path}/#{migration_version}_#{migration_name}-transaction-duration.json" } + let(:transaction_duration) { Gitlab::Json.parse(File.read(log_file)) } + let(:migration_version) { 20210422152437 } + let(:migration_name) { 'test' } + + after do + FileUtils.remove_entry(directory_path) + end + + it 'records real and sub transactions duration', :delete do + observe + + entry = transaction_duration[0] + start_time, end_time, transaction_type = entry.values_at('start_time', 'end_time', 'transaction_type') + start_time = DateTime.parse(start_time) + end_time = DateTime.parse(end_time) + + aggregate_failures do + expect(transaction_duration.size).to eq(3) + expect(start_time).to be_before(end_time) + expect(transaction_type).not_to be_nil + end + end + + context 'when there are sub-transactions' do + it 'records transaction duration' do + observe_sub_transaction + + expect(transaction_duration.size).to eq(1) + + entry = transaction_duration[0]['transaction_type'] + + expect(entry).to eql 'sub_transaction' + end + end + + context 'when there are real-transactions' do + it 'records transaction duration', :delete do + 
observe_real_transaction + + expect(transaction_duration.size).to eq(1) + + entry = transaction_duration[0]['transaction_type'] + + expect(entry).to eql 'real_transaction' + end + end + + private + + def observe + transaction_duration_observer.before + run_transaction + transaction_duration_observer.after + transaction_duration_observer.record + end + + def observe_sub_transaction + transaction_duration_observer.before + run_sub_transactions + transaction_duration_observer.after + transaction_duration_observer.record + end + + def observe_real_transaction + transaction_duration_observer.before + run_real_transactions + transaction_duration_observer.after + transaction_duration_observer.record + end + + def run_real_transactions + ActiveRecord::Base.transaction do + end + end + + def run_sub_transactions + ActiveRecord::Base.transaction(requires_new: true) do + end + end + + def run_transaction + ActiveRecord::Base.connection_pool.with_connection do |connection| + Gitlab::Database::SharedModel.using_connection(connection) do + Gitlab::Database::SharedModel.transaction do + Gitlab::Database::SharedModel.transaction(requires_new: true) do + Gitlab::Database::SharedModel.transaction do + Gitlab::Database::SharedModel.transaction do + Gitlab::Database::SharedModel.transaction(requires_new: true) do + end + end + end + end + end + end + end + end +end diff --git a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb index 8c406c90e36..b2c4e4b54a4 100644 --- a/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb +++ b/spec/lib/gitlab/database/partitioning/detached_partition_dropper_spec.rb @@ -5,6 +5,8 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do include Database::TableSchemaHelpers + subject(:dropper) { described_class.new } + let(:connection) { ActiveRecord::Base.connection } def 
expect_partition_present(name) @@ -23,10 +25,18 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do before do connection.execute(<<~SQL) + CREATE TABLE referenced_table ( + id bigserial primary key not null + ) + SQL + connection.execute(<<~SQL) + CREATE TABLE parent_table ( id bigserial not null, + referenced_id bigint not null, created_at timestamptz not null, - primary key (id, created_at) + primary key (id, created_at), + constraint fk_referenced foreign key (referenced_id) references referenced_table(id) ) PARTITION BY RANGE(created_at) SQL end @@ -59,7 +69,7 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do attached: false, drop_after: 1.day.from_now) - subject.perform + dropper.perform expect_partition_present('test_partition') end @@ -75,7 +85,7 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do end it 'drops the partition' do - subject.perform + dropper.perform expect(table_oid('test_partition')).to be_nil end @@ -86,16 +96,62 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do end it 'does not drop the partition' do - subject.perform + dropper.perform expect(table_oid('test_partition')).not_to be_nil end end + context 'removing foreign keys' do + it 'removes foreign keys from the table before dropping it' do + expect(dropper).to receive(:drop_detached_partition).and_wrap_original do |drop_method, partition_name| + expect(partition_name).to eq('test_partition') + expect(foreign_key_exists_by_name(partition_name, 'fk_referenced', schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)).to be_falsey + + drop_method.call(partition_name) + end + + expect(foreign_key_exists_by_name('test_partition', 'fk_referenced', schema: Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA)).to be_truthy + + dropper.perform + end + + it 'does not remove foreign keys from the parent table' do + expect { dropper.perform }.not_to change { foreign_key_exists_by_name('parent_table', 
'fk_referenced') }.from(true) + end + + context 'when another process drops the foreign key' do + it 'skips dropping that foreign key' do + expect(dropper).to receive(:drop_foreign_key_if_present).and_wrap_original do |drop_meth, *args| + connection.execute('alter table gitlab_partitions_dynamic.test_partition drop constraint fk_referenced;') + drop_meth.call(*args) + end + + dropper.perform + + expect_partition_removed('test_partition') + end + end + + context 'when another process drops the partition' do + it 'skips dropping the foreign key' do + expect(dropper).to receive(:drop_foreign_key_if_present).and_wrap_original do |drop_meth, *args| + connection.execute('drop table gitlab_partitions_dynamic.test_partition') + Postgresql::DetachedPartition.where(table_name: 'test_partition').delete_all + end + + expect(Gitlab::AppLogger).not_to receive(:error) + dropper.perform + end + end + end + context 'when another process drops the table while the first waits for a lock' do it 'skips the table' do + # First call to .lock is for removing foreign keys + expect(Postgresql::DetachedPartition).to receive(:lock).once.ordered.and_call_original # Rspec's receive_method_chain does not support .and_wrap_original, so we need to nest here. 
- expect(Postgresql::DetachedPartition).to receive(:lock).and_wrap_original do |lock_meth| + expect(Postgresql::DetachedPartition).to receive(:lock).once.ordered.and_wrap_original do |lock_meth| locked = lock_meth.call expect(locked).to receive(:find_by).and_wrap_original do |find_meth, *find_args| # Another process drops the table then deletes this entry @@ -106,9 +162,9 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do locked end - expect(subject).not_to receive(:drop_one) + expect(dropper).not_to receive(:drop_one) - subject.perform + dropper.perform end end end @@ -123,19 +179,26 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do end it 'does not drop the partition, but does remove the DetachedPartition entry' do - subject.perform + dropper.perform aggregate_failures do expect(table_oid('test_partition')).not_to be_nil expect(Postgresql::DetachedPartition.find_by(table_name: 'test_partition')).to be_nil end end - it 'removes the detached_partition entry' do - detached_partition = Postgresql::DetachedPartition.find_by!(table_name: 'test_partition') + context 'when another process removes the entry before this process' do + it 'does nothing' do + expect(Postgresql::DetachedPartition).to receive(:lock).and_wrap_original do |lock_meth| + Postgresql::DetachedPartition.delete_all + lock_meth.call + end - subject.perform + expect(Gitlab::AppLogger).not_to receive(:error) - expect(Postgresql::DetachedPartition.exists?(id: detached_partition.id)).to be_falsey + dropper.perform + + expect(table_oid('test_partition')).not_to be_nil + end end end @@ -155,7 +218,7 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do end it 'drops both partitions' do - subject.perform + dropper.perform expect_partition_removed('partition_1') expect_partition_removed('partition_2') @@ -163,10 +226,10 @@ RSpec.describe Gitlab::Database::Partitioning::DetachedPartitionDropper do context 'when the first drop returns 
an error' do it 'still drops the second partition' do - expect(subject).to receive(:drop_detached_partition).ordered.and_raise('injected error') - expect(subject).to receive(:drop_detached_partition).ordered.and_call_original + expect(dropper).to receive(:drop_detached_partition).ordered.and_raise('injected error') + expect(dropper).to receive(:drop_detached_partition).ordered.and_call_original - subject.perform + dropper.perform # We don't know which partition we tried to drop first, so the tests here have to work with either one expect(Postgresql::DetachedPartition.count).to eq(1) diff --git a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb index 27ada12b067..67d80d71e2a 100644 --- a/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb +++ b/spec/lib/gitlab/database/partitioning/monthly_strategy_spec.rb @@ -10,7 +10,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do let(:model) { double('model', table_name: table_name) } let(:partitioning_key) { double } - let(:table_name) { :partitioned_test } + let(:table_name) { :_test_partitioned_test } before do connection.execute(<<~SQL) @@ -18,11 +18,11 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do (id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at)) PARTITION BY RANGE (created_at); - CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_000000 + CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_000000 PARTITION OF #{table_name} FOR VALUES FROM (MINVALUE) TO ('2020-05-01'); - CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202005 + CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_202005 PARTITION OF #{table_name} FOR VALUES FROM ('2020-05-01') TO ('2020-06-01'); SQL @@ -30,8 +30,8 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy 
do it 'detects both partitions' do expect(subject).to eq([ - Gitlab::Database::Partitioning::TimePartition.new(table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000'), - Gitlab::Database::Partitioning::TimePartition.new(table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005') + Gitlab::Database::Partitioning::TimePartition.new(table_name, nil, '2020-05-01', partition_name: '_test_partitioned_test_000000'), + Gitlab::Database::Partitioning::TimePartition.new(table_name, '2020-05-01', '2020-06-01', partition_name: '_test_partitioned_test_202005') ]) end end @@ -41,7 +41,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do let(:model) do Class.new(ActiveRecord::Base) do - self.table_name = 'partitioned_test' + self.table_name = '_test_partitioned_test' self.primary_key = :id end end @@ -59,11 +59,11 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do (id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at)) PARTITION BY RANGE (created_at); - CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_000000 + CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_000000 PARTITION OF #{model.table_name} FOR VALUES FROM (MINVALUE) TO ('2020-05-01'); - CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202006 + CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_202006 PARTITION OF #{model.table_name} FOR VALUES FROM ('2020-06-01') TO ('2020-07-01'); SQL @@ -166,7 +166,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do (id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at)) PARTITION BY RANGE (created_at); - CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202006 + CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_202006 PARTITION OF #{model.table_name} FOR 
VALUES FROM ('2020-06-01') TO ('2020-07-01'); SQL @@ -181,13 +181,13 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do describe '#extra_partitions' do let(:model) do Class.new(ActiveRecord::Base) do - self.table_name = 'partitioned_test' + self.table_name = '_test_partitioned_test' self.primary_key = :id end end let(:partitioning_key) { :created_at } - let(:table_name) { :partitioned_test } + let(:table_name) { :_test_partitioned_test } around do |example| travel_to(Date.parse('2020-08-22')) { example.run } @@ -200,15 +200,15 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do (id serial not null, created_at timestamptz not null, PRIMARY KEY (id, created_at)) PARTITION BY RANGE (created_at); - CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_000000 + CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_000000 PARTITION OF #{table_name} FOR VALUES FROM (MINVALUE) TO ('2020-05-01'); - CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202005 + CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_202005 PARTITION OF #{table_name} FOR VALUES FROM ('2020-05-01') TO ('2020-06-01'); - CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.partitioned_test_202006 + CREATE TABLE #{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_partitioned_test_202006 PARTITION OF #{table_name} FOR VALUES FROM ('2020-06-01') TO ('2020-07-01') SQL @@ -235,7 +235,7 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do it 'prunes the unbounded partition ending 2020-05-01' do min_value_to_may = Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', - partition_name: 'partitioned_test_000000') + partition_name: '_test_partitioned_test_000000') expect(subject).to contain_exactly(min_value_to_may) end @@ -246,8 +246,8 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do it 'prunes 
the unbounded partition and the partition for May-June' do expect(subject).to contain_exactly( - Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000'), - Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005') + Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: '_test_partitioned_test_000000'), + Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: '_test_partitioned_test_202005') ) end @@ -256,16 +256,16 @@ RSpec.describe Gitlab::Database::Partitioning::MonthlyStrategy do it 'prunes empty partitions' do expect(subject).to contain_exactly( - Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000'), - Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: 'partitioned_test_202005') + Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: '_test_partitioned_test_000000'), + Gitlab::Database::Partitioning::TimePartition.new(model.table_name, '2020-05-01', '2020-06-01', partition_name: '_test_partitioned_test_202005') ) end it 'does not prune non-empty partitions' do - connection.execute("INSERT INTO #{table_name} (created_at) VALUES (('2020-05-15'))") # inserting one record into partitioned_test_202005 + connection.execute("INSERT INTO #{table_name} (created_at) VALUES (('2020-05-15'))") # inserting one record into _test_partitioned_test_202005 expect(subject).to contain_exactly( - Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 'partitioned_test_000000') + Gitlab::Database::Partitioning::TimePartition.new(model.table_name, nil, '2020-05-01', partition_name: 
'_test_partitioned_test_000000') ) end end diff --git a/spec/lib/gitlab/database/partitioning/multi_database_partition_dropper_spec.rb b/spec/lib/gitlab/database/partitioning/multi_database_partition_dropper_spec.rb deleted file mode 100644 index 56d6ebb7aff..00000000000 --- a/spec/lib/gitlab/database/partitioning/multi_database_partition_dropper_spec.rb +++ /dev/null @@ -1,38 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Database::Partitioning::MultiDatabasePartitionDropper, '#drop_detached_partitions' do - subject(:drop_detached_partitions) { multi_db_dropper.drop_detached_partitions } - - let(:multi_db_dropper) { described_class.new } - - let(:connection_wrapper1) { double(scope: scope1) } - let(:connection_wrapper2) { double(scope: scope2) } - - let(:scope1) { double(connection: connection1) } - let(:scope2) { double(connection: connection2) } - - let(:connection1) { double('connection') } - let(:connection2) { double('connection') } - - let(:dropper_class) { Gitlab::Database::Partitioning::DetachedPartitionDropper } - let(:dropper1) { double('partition dropper') } - let(:dropper2) { double('partition dropper') } - - before do - allow(multi_db_dropper).to receive(:databases).and_return({ db1: connection_wrapper1, db2: connection_wrapper2 }) - end - - it 'drops detached partitions for each database' do - expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(connection1).and_yield.ordered - expect(dropper_class).to receive(:new).and_return(dropper1).ordered - expect(dropper1).to receive(:perform) - - expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(connection2).and_yield.ordered - expect(dropper_class).to receive(:new).and_return(dropper2).ordered - expect(dropper2).to receive(:perform) - - drop_detached_partitions - end -end diff --git a/spec/lib/gitlab/database/partitioning/multi_database_partition_manager_spec.rb 
b/spec/lib/gitlab/database/partitioning/multi_database_partition_manager_spec.rb deleted file mode 100644 index 3c94c1bf4ea..00000000000 --- a/spec/lib/gitlab/database/partitioning/multi_database_partition_manager_spec.rb +++ /dev/null @@ -1,36 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Database::Partitioning::MultiDatabasePartitionManager, '#sync_partitions' do - subject(:sync_partitions) { manager.sync_partitions } - - let(:manager) { described_class.new(models) } - let(:models) { [model1, model2] } - - let(:model1) { double('model1', connection: connection1, table_name: 'table1') } - let(:model2) { double('model2', connection: connection1, table_name: 'table2') } - - let(:connection1) { double('connection1') } - let(:connection2) { double('connection2') } - - let(:target_manager_class) { Gitlab::Database::Partitioning::PartitionManager } - let(:target_manager1) { double('partition manager') } - let(:target_manager2) { double('partition manager') } - - before do - allow(manager).to receive(:connection_name).and_return('name') - end - - it 'syncs model partitions, setting up the appropriate connection for each', :aggregate_failures do - expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(model1.connection).and_yield.ordered - expect(target_manager_class).to receive(:new).with(model1).and_return(target_manager1).ordered - expect(target_manager1).to receive(:sync_partitions) - - expect(Gitlab::Database::SharedModel).to receive(:using_connection).with(model2.connection).and_yield.ordered - expect(target_manager_class).to receive(:new).with(model2).and_return(target_manager2).ordered - expect(target_manager2).to receive(:sync_partitions) - - sync_partitions - end -end diff --git a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb index 7c4cfcfb3a9..1c6f5c5c694 100644 --- 
a/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb +++ b/spec/lib/gitlab/database/partitioning/partition_manager_spec.rb @@ -195,7 +195,7 @@ RSpec.describe Gitlab::Database::Partitioning::PartitionManager do end # Postgres 11 does not support foreign keys to partitioned tables - if Gitlab::Database.main.version.to_f >= 12 + if ApplicationRecord.database.version.to_f >= 12 context 'when the model is the target of a foreign key' do before do connection.execute(<<~SQL) diff --git a/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb b/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb index 7024cbd55ff..006ce8a7f48 100644 --- a/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb +++ b/spec/lib/gitlab/database/partitioning/partition_monitoring_spec.rb @@ -4,9 +4,8 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Partitioning::PartitionMonitoring do describe '#report_metrics' do - subject { described_class.new(models).report_metrics } + subject { described_class.new.report_metrics_for_model(model) } - let(:models) { [model] } let(:model) { double(partitioning_strategy: partitioning_strategy, table_name: table) } let(:partitioning_strategy) { double(missing_partitions: missing_partitions, current_partitions: current_partitions, extra_partitions: extra_partitions) } let(:table) { "some_table" } diff --git a/spec/lib/gitlab/database/partitioning/replace_table_spec.rb b/spec/lib/gitlab/database/partitioning/replace_table_spec.rb index 8e27797208c..fdf514b519f 100644 --- a/spec/lib/gitlab/database/partitioning/replace_table_spec.rb +++ b/spec/lib/gitlab/database/partitioning/replace_table_spec.rb @@ -5,7 +5,9 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Partitioning::ReplaceTable, '#perform' do include Database::TableSchemaHelpers - subject(:replace_table) { described_class.new(original_table, replacement_table, archived_table, 'id').perform } + subject(:replace_table) do + 
described_class.new(connection, original_table, replacement_table, archived_table, 'id').perform + end let(:original_table) { '_test_original_table' } let(:replacement_table) { '_test_replacement_table' } diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb index 486af9413e8..154cc2b7972 100644 --- a/spec/lib/gitlab/database/partitioning_spec.rb +++ b/spec/lib/gitlab/database/partitioning_spec.rb @@ -3,52 +3,175 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Partitioning do + include Database::PartitioningHelpers + include Database::TableSchemaHelpers + + let(:connection) { ApplicationRecord.connection } + + around do |example| + previously_registered_models = described_class.registered_models.dup + described_class.instance_variable_set('@registered_models', Set.new) + + previously_registered_tables = described_class.registered_tables.dup + described_class.instance_variable_set('@registered_tables', Set.new) + + example.run + + described_class.instance_variable_set('@registered_models', previously_registered_models) + described_class.instance_variable_set('@registered_tables', previously_registered_tables) + end + + describe '.register_models' do + context 'ensure that the registered models have partitioning strategy' do + it 'fails when partitioning_strategy is not specified for the model' do + model = Class.new(ApplicationRecord) + expect { described_class.register_models([model]) }.to raise_error /should have partitioning strategy defined/ + end + end + end + + describe '.sync_partitions_ignore_db_error' do + it 'calls sync_partitions' do + expect(described_class).to receive(:sync_partitions) + + described_class.sync_partitions_ignore_db_error + end + + [ActiveRecord::ActiveRecordError, PG::Error].each do |error| + context "when #{error} is raised" do + before do + expect(described_class).to receive(:sync_partitions) + .and_raise(error) + end + + it 'ignores it' do + 
described_class.sync_partitions_ignore_db_error + end + end + end + + context 'when DISABLE_POSTGRES_PARTITION_CREATION_ON_STARTUP is set' do + before do + stub_env('DISABLE_POSTGRES_PARTITION_CREATION_ON_STARTUP', '1') + end + + it 'does not call sync_partitions' do + expect(described_class).to receive(:sync_partitions).never + + described_class.sync_partitions_ignore_db_error + end + end + end + describe '.sync_partitions' do - let(:partition_manager_class) { described_class::MultiDatabasePartitionManager } - let(:partition_manager) { double('partition manager') } + let(:table_names) { %w[partitioning_test1 partitioning_test2] } + let(:models) do + table_names.map do |table_name| + Class.new(ApplicationRecord) do + include PartitionedTable + + self.table_name = table_name + partitioned_by :created_at, strategy: :monthly + end + end + end + + before do + table_names.each do |table_name| + connection.execute(<<~SQL) + CREATE TABLE #{table_name} ( + id serial not null, + created_at timestamptz not null, + PRIMARY KEY (id, created_at)) + PARTITION BY RANGE (created_at); + SQL + end + end + + it 'manages partitions for each given model' do + expect { described_class.sync_partitions(models)} + .to change { find_partitions(table_names.first).size }.from(0) + .and change { find_partitions(table_names.last).size }.from(0) + end context 'when no partitioned models are given' do - it 'calls the partition manager with the registered models' do - expect(partition_manager_class).to receive(:new) - .with(described_class.registered_models) - .and_return(partition_manager) + it 'manages partitions for each registered model' do + described_class.register_models([models.first]) + described_class.register_tables([ + { + table_name: table_names.last, + partitioned_column: :created_at, strategy: :monthly + } + ]) - expect(partition_manager).to receive(:sync_partitions) + expect { described_class.sync_partitions } + .to change { find_partitions(table_names.first).size }.from(0) + .and 
change { find_partitions(table_names.last).size }.from(0) + end + end + end + + describe '.report_metrics' do + let(:model1) { double('model') } + let(:model2) { double('model') } + + let(:partition_monitoring_class) { described_class::PartitionMonitoring } + + context 'when no partitioned models are given' do + it 'reports metrics for each registered model' do + expect_next_instance_of(partition_monitoring_class) do |partition_monitor| + expect(partition_monitor).to receive(:report_metrics_for_model).with(model1) + expect(partition_monitor).to receive(:report_metrics_for_model).with(model2) + end + + expect(Gitlab::Database::EachDatabase).to receive(:each_model_connection) + .with(described_class.__send__(:registered_models)) + .and_yield(model1) + .and_yield(model2) - described_class.sync_partitions + described_class.report_metrics end end context 'when partitioned models are given' do - it 'calls the partition manager with the given models' do - models = ['my special model'] + it 'reports metrics for each given model' do + expect_next_instance_of(partition_monitoring_class) do |partition_monitor| + expect(partition_monitor).to receive(:report_metrics_for_model).with(model1) + expect(partition_monitor).to receive(:report_metrics_for_model).with(model2) + end - expect(partition_manager_class).to receive(:new) - .with(models) - .and_return(partition_manager) + expect(Gitlab::Database::EachDatabase).to receive(:each_model_connection) + .with([model1, model2]) + .and_yield(model1) + .and_yield(model2) - expect(partition_manager).to receive(:sync_partitions) - - described_class.sync_partitions(models) + described_class.report_metrics([model1, model2]) end end end describe '.drop_detached_partitions' do - let(:partition_dropper_class) { described_class::MultiDatabasePartitionDropper } + let(:table_names) { %w[detached_test_partition1 detached_test_partition2] } + + before do + table_names.each do |table_name| + 
connection.create_table("#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}.#{table_name}") - it 'delegates to the partition dropper' do - expect_next_instance_of(partition_dropper_class) do |partition_dropper| - expect(partition_dropper).to receive(:drop_detached_partitions) + Postgresql::DetachedPartition.create!(table_name: table_name, drop_after: 1.year.ago) end + end - described_class.drop_detached_partitions + it 'drops detached partitions for each database' do + expect(Gitlab::Database::EachDatabase).to receive(:each_database_connection).and_yield + + expect { described_class.drop_detached_partitions } + .to change { Postgresql::DetachedPartition.count }.from(2).to(0) + .and change { table_exists?(table_names.first) }.from(true).to(false) + .and change { table_exists?(table_names.last) }.from(true).to(false) end - end - context 'ensure that the registered models have partitioning strategy' do - it 'fails when partitioning_strategy is not specified for the model' do - expect(described_class.registered_models).to all(respond_to(:partitioning_strategy)) + def table_exists?(table_name) + table_oid(table_name).present? 
end end end diff --git a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb index ec39e5bfee7..b0e08ca1e67 100644 --- a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb +++ b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb @@ -38,4 +38,16 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model do expect(described_class.by_referenced_table_identifier('public.referenced_table')).to contain_exactly(expected) end end + + describe '#by_constrained_table_identifier' do + it 'throws an error when the identifier name is not fully qualified' do + expect { described_class.by_constrained_table_identifier('constrained_table') }.to raise_error(ArgumentError, /not fully qualified/) + end + + it 'finds the foreign keys for the constrained table' do + expected = described_class.where(name: %w[fk_constrained_to_referenced fk_constrained_to_other_referenced]).to_a + + expect(described_class.by_constrained_table_identifier('public.constrained_table')).to match_array(expected) + end + end end diff --git a/spec/lib/gitlab/database/postgres_hll/batch_distinct_counter_spec.rb b/spec/lib/gitlab/database/postgres_hll/batch_distinct_counter_spec.rb index 2c550f14a08..c9bbc32e059 100644 --- a/spec/lib/gitlab/database/postgres_hll/batch_distinct_counter_spec.rb +++ b/spec/lib/gitlab/database/postgres_hll/batch_distinct_counter_spec.rb @@ -21,7 +21,7 @@ RSpec.describe Gitlab::Database::PostgresHll::BatchDistinctCounter do end before do - allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(in_transaction) + allow(model.connection).to receive(:transaction_open?).and_return(in_transaction) end context 'unit test for different counting parameters' do diff --git a/spec/lib/gitlab/database/postgres_index_bloat_estimate_spec.rb b/spec/lib/gitlab/database/postgres_index_bloat_estimate_spec.rb index da4422bd442..13ac9190ab7 100644 --- 
a/spec/lib/gitlab/database/postgres_index_bloat_estimate_spec.rb +++ b/spec/lib/gitlab/database/postgres_index_bloat_estimate_spec.rb @@ -13,6 +13,8 @@ RSpec.describe Gitlab::Database::PostgresIndexBloatEstimate do let(:identifier) { 'public.schema_migrations_pkey' } + it { is_expected.to be_a Gitlab::Database::SharedModel } + describe '#bloat_size' do it 'returns the bloat size in bytes' do # We cannot reach much more about the bloat size estimate here diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb index 9088719d5a4..db66736676b 100644 --- a/spec/lib/gitlab/database/postgres_index_spec.rb +++ b/spec/lib/gitlab/database/postgres_index_spec.rb @@ -22,6 +22,8 @@ RSpec.describe Gitlab::Database::PostgresIndex do it_behaves_like 'a postgres model' + it { is_expected.to be_a Gitlab::Database::SharedModel } + describe '.reindexing_support' do it 'only non partitioned indexes' do expect(described_class.reindexing_support).to all(have_attributes(partitioned: false)) diff --git a/spec/lib/gitlab/database/query_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzer_spec.rb new file mode 100644 index 00000000000..82a1c7143d5 --- /dev/null +++ b/spec/lib/gitlab/database/query_analyzer_spec.rb @@ -0,0 +1,144 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do + let(:analyzer) { double(:query_analyzer) } + let(:disabled_analyzer) { double(:disabled_query_analyzer) } + + before do + allow(described_class.instance).to receive(:all_analyzers).and_return([analyzer, disabled_analyzer]) + allow(analyzer).to receive(:enabled?).and_return(true) + allow(analyzer).to receive(:suppressed?).and_return(false) + allow(analyzer).to receive(:begin!) + allow(analyzer).to receive(:end!) 
+ allow(disabled_analyzer).to receive(:enabled?).and_return(false) + end + + context 'the hook is enabled by default in specs' do + it 'does process queries and gets normalized SQL' do + expect(analyzer).to receive(:enabled?).and_return(true) + expect(analyzer).to receive(:analyze) do |parsed| + expect(parsed.sql).to include("SELECT $1 FROM projects") + expect(parsed.pg.tables).to eq(%w[projects]) + end + + described_class.instance.within do + Project.connection.execute("SELECT 1 FROM projects") + end + end + + it 'does prevent recursive execution' do + expect(analyzer).to receive(:enabled?).and_return(true) + expect(analyzer).to receive(:analyze) do + Project.connection.execute("SELECT 1 FROM projects") + end + + described_class.instance.within do + Project.connection.execute("SELECT 1 FROM projects") + end + end + end + + describe '#within' do + context 'when it is already initialized' do + around do |example| + described_class.instance.within do + example.run + end + end + + it 'does not evaluate enabled? again do yield block' do + expect(analyzer).not_to receive(:enabled?) + + expect { |b| described_class.instance.within(&b) }.to yield_control + end + end + + context 'when initializer is enabled' do + before do + expect(analyzer).to receive(:enabled?).and_return(true) + end + + it 'calls begin! and end!' do + expect(analyzer).to receive(:begin!) + expect(analyzer).to receive(:end!) + + expect { |b| described_class.instance.within(&b) }.to yield_control + end + + it 'when begin! raises the end! is not called' do + expect(analyzer).to receive(:begin!).and_raise('exception') + expect(analyzer).not_to receive(:end!) 
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + + expect { |b| described_class.instance.within(&b) }.to yield_control + end + end + end + + describe '#process_sql' do + it 'does not analyze query if not enabled' do + expect(analyzer).to receive(:enabled?).and_return(false) + expect(analyzer).not_to receive(:analyze) + + process_sql("SELECT 1 FROM projects") + end + + it 'does analyze query if enabled' do + expect(analyzer).to receive(:enabled?).and_return(true) + expect(analyzer).to receive(:analyze) do |parsed| + expect(parsed.sql).to eq("SELECT $1 FROM projects") + expect(parsed.pg.tables).to eq(%w[projects]) + end + + process_sql("SELECT 1 FROM projects") + end + + it 'does track exception if query cannot be parsed' do + expect(analyzer).to receive(:enabled?).and_return(true) + expect(analyzer).not_to receive(:analyze) + expect(Gitlab::ErrorTracking).to receive(:track_exception) + + expect { process_sql("invalid query") }.not_to raise_error + end + + it 'does track exception if analyzer raises exception on enabled?' 
do + expect(analyzer).to receive(:enabled?).and_raise('exception') + expect(analyzer).not_to receive(:analyze) + expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + + expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error + end + + it 'does track exception if analyzer raises exception on analyze' do + expect(analyzer).to receive(:enabled?).and_return(true) + expect(analyzer).to receive(:analyze).and_raise('exception') + expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + + expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error + end + + it 'does call analyze only on enabled initializers' do + expect(analyzer).to receive(:analyze) + expect(disabled_analyzer).not_to receive(:analyze) + + expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error + end + + it 'does not call analyze on suppressed analyzers' do + expect(analyzer).to receive(:suppressed?).and_return(true) + expect(analyzer).not_to receive(:analyze) + + expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error + end + + def process_sql(sql) + described_class.instance.within do + ApplicationRecord.load_balancer.read_write do |connection| + described_class.instance.process_sql(sql, connection) + end + end + end + end +end diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb new file mode 100644 index 00000000000..ab5f05e3ec4 --- /dev/null +++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_analyzers: false do + let(:analyzer) { described_class } + + before do + allow(Gitlab::Database::QueryAnalyzer.instance).to receive(:all_analyzers).and_return([analyzer]) + end + + it 'does not increment metrics if feature flag is disabled' do + 
stub_feature_flags(query_analyzer_gitlab_schema_metrics: false) + + expect(analyzer).not_to receive(:analyze) + + process_sql(ActiveRecord::Base, "SELECT 1 FROM projects") + end + + context 'properly observes all queries', :mocked_ci_connection do + using RSpec::Parameterized::TableSyntax + + where do + { + "for simple query observes schema correctly" => { + model: ApplicationRecord, + sql: "SELECT 1 FROM projects", + expectations: { + gitlab_schemas: "gitlab_main", + db_config_name: "main" + } + }, + "for query accessing gitlab_ci and gitlab_main" => { + model: ApplicationRecord, + sql: "SELECT 1 FROM projects LEFT JOIN ci_builds ON ci_builds.project_id=projects.id", + expectations: { + gitlab_schemas: "gitlab_ci,gitlab_main", + db_config_name: "main" + } + }, + "for query accessing gitlab_ci and gitlab_main the gitlab_schemas is always ordered" => { + model: ApplicationRecord, + sql: "SELECT 1 FROM ci_builds LEFT JOIN projects ON ci_builds.project_id=projects.id", + expectations: { + gitlab_schemas: "gitlab_ci,gitlab_main", + db_config_name: "main" + } + }, + "for query accessing CI database" => { + model: Ci::ApplicationRecord, + sql: "SELECT 1 FROM ci_builds", + expectations: { + gitlab_schemas: "gitlab_ci", + db_config_name: "ci" + } + } + } + end + + with_them do + around do |example| + Gitlab::Database::QueryAnalyzer.instance.within { example.run } + end + + it do + expect(described_class.schemas_metrics).to receive(:increment) + .with(expectations).and_call_original + + process_sql(model, sql) + end + end + end + + def process_sql(model, sql) + Gitlab::Database::QueryAnalyzer.instance.within do + # Skip load balancer and retrieve connection assigned to model + Gitlab::Database::QueryAnalyzer.instance.process_sql(sql, model.retrieve_connection) + end + end +end diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb new file mode 
100644 index 00000000000..eb8ccb0bd89 --- /dev/null +++ b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb @@ -0,0 +1,167 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification, query_analyzers: false do + let_it_be(:pipeline, refind: true) { create(:ci_pipeline) } + let_it_be(:project, refind: true) { create(:project) } + + before do + allow(Gitlab::Database::QueryAnalyzer.instance).to receive(:all_analyzers).and_return([described_class]) + end + + around do |example| + Gitlab::Database::QueryAnalyzer.instance.within { example.run } + end + + shared_examples 'successful examples' do + context 'outside transaction' do + it { expect { run_queries }.not_to raise_error } + end + + context 'within transaction' do + it do + Project.transaction do + expect { run_queries }.not_to raise_error + end + end + end + + context 'within nested transaction' do + it do + Project.transaction(requires_new: true) do + Project.transaction(requires_new: true) do + expect { run_queries }.not_to raise_error + end + end + end + end + end + + context 'when CI and other tables are read in a transaction' do + def run_queries + pipeline.reload + project.reload + end + + include_examples 'successful examples' + end + + context 'when only CI data is modified' do + def run_queries + pipeline.touch + project.reload + end + + include_examples 'successful examples' + end + + context 'when other data is modified' do + def run_queries + pipeline.reload + project.touch + end + + include_examples 'successful examples' + end + + context 'when both CI and other data is modified' do + def run_queries + project.touch + pipeline.touch + end + + context 'outside transaction' do + it { expect { run_queries }.not_to raise_error } + end + + context 'when data modification happens in a transaction' do + it 'raises error' do + Project.transaction do + expect { run_queries }.to raise_error 
/Cross-database data modification/ + end + end + + context 'when data modification happens in nested transactions' do + it 'raises error' do + Project.transaction(requires_new: true) do + project.touch + Project.transaction(requires_new: true) do + expect { pipeline.touch }.to raise_error /Cross-database data modification/ + end + end + end + end + end + + context 'when executing a SELECT FOR UPDATE query' do + def run_queries + project.touch + pipeline.lock! + end + + context 'outside transaction' do + it { expect { run_queries }.not_to raise_error } + end + + context 'when data modification happens in a transaction' do + it 'raises error' do + Project.transaction do + expect { run_queries }.to raise_error /Cross-database data modification/ + end + end + + context 'when the modification is inside a factory save! call' do + let(:runner) { create(:ci_runner, :project, projects: [build(:project)]) } + + it 'does not raise an error' do + runner + end + end + end + end + + context 'when CI association is modified through project' do + def run_queries + project.variables.build(key: 'a', value: 'v') + project.save! 
+ end + + include_examples 'successful examples' + end + + describe '.allow_cross_database_modification_within_transaction' do + it 'skips raising error' do + expect do + described_class.allow_cross_database_modification_within_transaction(url: 'gitlab-issue') do + Project.transaction do + pipeline.touch + project.touch + end + end + end.not_to raise_error + end + + it 'skips raising error on factory creation' do + expect do + described_class.allow_cross_database_modification_within_transaction(url: 'gitlab-issue') do + ApplicationRecord.transaction do + create(:ci_pipeline) + end + end + end.not_to raise_error + end + end + end + + context 'when some table with a defined schema and another table with undefined gitlab_schema is modified' do + it 'raises an error including including message about undefined schema' do + expect do + Project.transaction do + project.touch + project.connection.execute('UPDATE foo_bars_undefined_table SET a=1 WHERE id = -1') + end + end.to raise_error /Cross-database data modification.*The gitlab_schema was undefined/ + end + end +end diff --git a/spec/lib/gitlab/database/reflection_spec.rb b/spec/lib/gitlab/database/reflection_spec.rb new file mode 100644 index 00000000000..7c3d797817d --- /dev/null +++ b/spec/lib/gitlab/database/reflection_spec.rb @@ -0,0 +1,280 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Database::Reflection do + let(:database) { described_class.new(ApplicationRecord) } + + describe '#username' do + context 'when a username is set' do + it 'returns the username' do + allow(database).to receive(:config).and_return(username: 'bob') + + expect(database.username).to eq('bob') + end + end + + context 'when a username is not set' do + it 'returns the value of the USER environment variable' do + allow(database).to receive(:config).and_return(username: nil) + allow(ENV).to receive(:[]).with('USER').and_return('bob') + + expect(database.username).to eq('bob') + end + end + end + + 
describe '#database_name' do + it 'returns the name of the database' do + allow(database).to receive(:config).and_return(database: 'test') + + expect(database.database_name).to eq('test') + end + end + + describe '#adapter_name' do + it 'returns the database adapter name' do + allow(database).to receive(:config).and_return(adapter: 'test') + + expect(database.adapter_name).to eq('test') + end + end + + describe '#human_adapter_name' do + context 'when the adapter is PostgreSQL' do + it 'returns PostgreSQL' do + allow(database).to receive(:config).and_return(adapter: 'postgresql') + + expect(database.human_adapter_name).to eq('PostgreSQL') + end + end + + context 'when the adapter is not PostgreSQL' do + it 'returns Unknown' do + allow(database).to receive(:config).and_return(adapter: 'kittens') + + expect(database.human_adapter_name).to eq('Unknown') + end + end + end + + describe '#postgresql?' do + context 'when using PostgreSQL' do + it 'returns true' do + allow(database).to receive(:adapter_name).and_return('PostgreSQL') + + expect(database.postgresql?).to eq(true) + end + end + + context 'when not using PostgreSQL' do + it 'returns false' do + allow(database).to receive(:adapter_name).and_return('MySQL') + + expect(database.postgresql?).to eq(false) + end + end + end + + describe '#db_read_only?' 
do + it 'detects a read-only database' do + allow(database.model.connection) + .to receive(:execute) + .with('SELECT pg_is_in_recovery()') + .and_return([{ "pg_is_in_recovery" => "t" }]) + + expect(database.db_read_only?).to be_truthy + end + + it 'detects a read-only database' do + allow(database.model.connection) + .to receive(:execute) + .with('SELECT pg_is_in_recovery()') + .and_return([{ "pg_is_in_recovery" => true }]) + + expect(database.db_read_only?).to be_truthy + end + + it 'detects a read-write database' do + allow(database.model.connection) + .to receive(:execute) + .with('SELECT pg_is_in_recovery()') + .and_return([{ "pg_is_in_recovery" => "f" }]) + + expect(database.db_read_only?).to be_falsey + end + + it 'detects a read-write database' do + allow(database.model.connection) + .to receive(:execute) + .with('SELECT pg_is_in_recovery()') + .and_return([{ "pg_is_in_recovery" => false }]) + + expect(database.db_read_only?).to be_falsey + end + end + + describe '#db_read_write?' 
do + it 'detects a read-only database' do + allow(database.model.connection) + .to receive(:execute) + .with('SELECT pg_is_in_recovery()') + .and_return([{ "pg_is_in_recovery" => "t" }]) + + expect(database.db_read_write?).to eq(false) + end + + it 'detects a read-only database' do + allow(database.model.connection) + .to receive(:execute) + .with('SELECT pg_is_in_recovery()') + .and_return([{ "pg_is_in_recovery" => true }]) + + expect(database.db_read_write?).to eq(false) + end + + it 'detects a read-write database' do + allow(database.model.connection) + .to receive(:execute) + .with('SELECT pg_is_in_recovery()') + .and_return([{ "pg_is_in_recovery" => "f" }]) + + expect(database.db_read_write?).to eq(true) + end + + it 'detects a read-write database' do + allow(database.model.connection) + .to receive(:execute) + .with('SELECT pg_is_in_recovery()') + .and_return([{ "pg_is_in_recovery" => false }]) + + expect(database.db_read_write?).to eq(true) + end + end + + describe '#version' do + around do |example| + database.instance_variable_set(:@version, nil) + example.run + database.instance_variable_set(:@version, nil) + end + + context "on postgresql" do + it "extracts the version number" do + allow(database) + .to receive(:database_version) + .and_return("PostgreSQL 9.4.4 on x86_64-apple-darwin14.3.0") + + expect(database.version).to eq '9.4.4' + end + end + + it 'memoizes the result' do + count = ActiveRecord::QueryRecorder + .new { 2.times { database.version } } + .count + + expect(count).to eq(1) + end + end + + describe '#postgresql_minimum_supported_version?' 
do + it 'returns false when using PostgreSQL 10' do + allow(database).to receive(:version).and_return('10') + + expect(database.postgresql_minimum_supported_version?).to eq(false) + end + + it 'returns false when using PostgreSQL 11' do + allow(database).to receive(:version).and_return('11') + + expect(database.postgresql_minimum_supported_version?).to eq(false) + end + + it 'returns true when using PostgreSQL 12' do + allow(database).to receive(:version).and_return('12') + + expect(database.postgresql_minimum_supported_version?).to eq(true) + end + end + + describe '#cached_column_exists?' do + it 'only retrieves the data from the schema cache' do + database = described_class.new(Project) + queries = ActiveRecord::QueryRecorder.new do + 2.times do + expect(database.cached_column_exists?(:id)).to be_truthy + expect(database.cached_column_exists?(:bogus_column)).to be_falsey + end + end + + expect(queries.count).to eq(0) + end + end + + describe '#cached_table_exists?' do + it 'only retrieves the data from the schema cache' do + dummy = Class.new(ActiveRecord::Base) do + self.table_name = 'bogus_table_name' + end + + queries = ActiveRecord::QueryRecorder.new do + 2.times do + expect(described_class.new(Project).cached_table_exists?).to be_truthy + expect(described_class.new(dummy).cached_table_exists?).to be_falsey + end + end + + expect(queries.count).to eq(0) + end + + it 'returns false when database does not exist' do + database = described_class.new(Project) + + expect(database.model).to receive(:connection) do + raise ActiveRecord::NoDatabaseError, 'broken' + end + + expect(database.cached_table_exists?).to be(false) + end + end + + describe '#exists?' 
do + it 'returns true if the database exists' do + expect(database.exists?).to be(true) + end + + it "returns false if the database doesn't exist" do + expect(database.model.connection.schema_cache) + .to receive(:database_version) + .and_raise(ActiveRecord::NoDatabaseError) + + expect(database.exists?).to be(false) + end + end + + describe '#system_id' do + it 'returns the PostgreSQL system identifier' do + expect(database.system_id).to be_an_instance_of(Integer) + end + end + + describe '#config' do + it 'returns a HashWithIndifferentAccess' do + expect(database.config) + .to be_an_instance_of(HashWithIndifferentAccess) + end + + it 'returns a default pool size' do + expect(database.config) + .to include(pool: Gitlab::Database.default_pool_size) + end + + it 'does not cache its results' do + a = database.config + b = database.config + + expect(a).not_to equal(b) + end + end +end diff --git a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb index ee3f2b1b415..2ae9037959d 100644 --- a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb +++ b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb @@ -46,14 +46,14 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection do expect(subject).not_to include(excluded.index) end - it 'excludes indexes larger than 100 GB ondisk size' do - excluded = create( + it 'includes indexes larger than 100 GB ondisk size' do + included = create( :postgres_index_bloat_estimate, index: create(:postgres_index, ondisk_size_bytes: 101.gigabytes), bloat_size_bytes: 25.gigabyte ) - expect(subject).not_to include(excluded.index) + expect(subject).to include(included.index) end context 'with time frozen' do diff --git a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb index a8f196d8f0e..1b409924acc 100644 --- a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb +++ 
b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb @@ -11,6 +11,8 @@ RSpec.describe Gitlab::Database::Reindexing::ReindexAction do swapout_view_for_table(:postgres_indexes) end + it { is_expected.to be_a Gitlab::Database::SharedModel } + describe '.create_for' do subject { described_class.create_for(index) } diff --git a/spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb b/spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb index 6f87475fc94..db267ff4f14 100644 --- a/spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb +++ b/spec/lib/gitlab/database/reindexing/reindex_concurrently_spec.rb @@ -62,7 +62,7 @@ RSpec.describe Gitlab::Database::Reindexing::ReindexConcurrently, '#perform' do it 'recreates the index using REINDEX with a long statement timeout' do expect_to_execute_in_order( - "SET statement_timeout TO '32400s'", + "SET statement_timeout TO '86400s'", "REINDEX INDEX CONCURRENTLY \"public\".\"#{index.name}\"", "RESET statement_timeout" ) @@ -84,7 +84,7 @@ RSpec.describe Gitlab::Database::Reindexing::ReindexConcurrently, '#perform' do it 'drops the dangling indexes while controlling lock_timeout' do expect_to_execute_in_order( # Regular index rebuild - "SET statement_timeout TO '32400s'", + "SET statement_timeout TO '86400s'", "REINDEX INDEX CONCURRENTLY \"public\".\"#{index_name}\"", "RESET statement_timeout", # Drop _ccnew index diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb index 550f9db2b5b..13aff343432 100644 --- a/spec/lib/gitlab/database/reindexing_spec.rb +++ b/spec/lib/gitlab/database/reindexing_spec.rb @@ -4,10 +4,63 @@ require 'spec_helper' RSpec.describe Gitlab::Database::Reindexing do include ExclusiveLeaseHelpers + include Database::DatabaseHelpers - describe '.perform' do - subject { described_class.perform(candidate_indexes) } + describe '.automatic_reindexing' do + subject { described_class.automatic_reindexing(maximum_records: limit) } + 
let(:limit) { 5 } + + before_all do + swapout_view_for_table(:postgres_indexes) + end + + before do + allow(Gitlab::Database::Reindexing).to receive(:cleanup_leftovers!) + allow(Gitlab::Database::Reindexing).to receive(:perform_from_queue).and_return(0) + allow(Gitlab::Database::Reindexing).to receive(:perform_with_heuristic).and_return(0) + end + + it 'cleans up leftovers, before consuming the queue' do + expect(Gitlab::Database::Reindexing).to receive(:cleanup_leftovers!).ordered + expect(Gitlab::Database::Reindexing).to receive(:perform_from_queue).ordered + + subject + end + + context 'with records in the queue' do + before do + create(:reindexing_queued_action) + end + + context 'with enough records in the queue to reach limit' do + let(:limit) { 1 } + + it 'does not perform reindexing with heuristic' do + expect(Gitlab::Database::Reindexing).to receive(:perform_from_queue).and_return(limit) + expect(Gitlab::Database::Reindexing).not_to receive(:perform_with_heuristic) + + subject + end + end + + context 'without enough records in the queue to reach limit' do + let(:limit) { 2 } + + it 'continues if the queue did not have enough records' do + expect(Gitlab::Database::Reindexing).to receive(:perform_from_queue).ordered.and_return(1) + expect(Gitlab::Database::Reindexing).to receive(:perform_with_heuristic).with(maximum_records: 1).ordered + + subject + end + end + end + end + + describe '.perform_with_heuristic' do + subject { described_class.perform_with_heuristic(candidate_indexes, maximum_records: limit) } + + let(:limit) { 2 } let(:coordinator) { instance_double(Gitlab::Database::Reindexing::Coordinator) } let(:index_selection) { instance_double(Gitlab::Database::Reindexing::IndexSelection) } let(:candidate_indexes) { double } @@ -15,7 +68,7 @@ RSpec.describe Gitlab::Database::Reindexing do it 'delegates to Coordinator' do expect(Gitlab::Database::Reindexing::IndexSelection).to receive(:new).with(candidate_indexes).and_return(index_selection) - 
expect(index_selection).to receive(:take).with(2).and_return(indexes) + expect(index_selection).to receive(:take).with(limit).and_return(indexes) indexes.each do |index| expect(Gitlab::Database::Reindexing::Coordinator).to receive(:new).with(index).and_return(coordinator) @@ -26,6 +79,59 @@ RSpec.describe Gitlab::Database::Reindexing do end end + describe '.perform_from_queue' do + subject { described_class.perform_from_queue(maximum_records: limit) } + + before_all do + swapout_view_for_table(:postgres_indexes) + end + + let(:limit) { 2 } + let(:queued_actions) { create_list(:reindexing_queued_action, 3) } + let(:coordinator) { instance_double(Gitlab::Database::Reindexing::Coordinator) } + + before do + queued_actions.take(limit).each do |action| + allow(Gitlab::Database::Reindexing::Coordinator).to receive(:new).with(action.index).and_return(coordinator) + allow(coordinator).to receive(:perform) + end + end + + it 'consumes the queue in order of created_at and applies the limit' do + queued_actions.take(limit).each do |action| + expect(Gitlab::Database::Reindexing::Coordinator).to receive(:new).ordered.with(action.index).and_return(coordinator) + expect(coordinator).to receive(:perform) + end + + subject + end + + it 'updates queued action and sets state to done' do + subject + + queue = queued_actions + + queue.shift(limit).each do |action| + expect(action.reload.state).to eq('done') + end + + queue.each do |action| + expect(action.reload.state).to eq('queued') + end + end + + it 'updates queued action upon error and sets state to failed' do + expect(Gitlab::Database::Reindexing::Coordinator).to receive(:new).ordered.with(queued_actions.first.index).and_return(coordinator) + expect(coordinator).to receive(:perform).and_raise('something went wrong') + + subject + + states = queued_actions.map(&:reload).map(&:state) + + expect(states).to eq(%w(failed done queued)) + end + end + describe '.cleanup_leftovers!' do subject { described_class.cleanup_leftovers! 
} diff --git a/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb b/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb index 8c0c4155ccc..7caee414719 100644 --- a/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb +++ b/spec/lib/gitlab/database/schema_cache_with_renamed_table_spec.rb @@ -11,12 +11,12 @@ RSpec.describe Gitlab::Database::SchemaCacheWithRenamedTable do let(:new_model) do Class.new(ActiveRecord::Base) do - self.table_name = 'projects_new' + self.table_name = '_test_projects_new' end end before do - stub_const('Gitlab::Database::TABLES_TO_BE_RENAMED', { 'projects' => 'projects_new' }) + stub_const('Gitlab::Database::TABLES_TO_BE_RENAMED', { 'projects' => '_test_projects_new' }) end context 'when table is not renamed yet' do @@ -32,8 +32,8 @@ RSpec.describe Gitlab::Database::SchemaCacheWithRenamedTable do context 'when table is renamed' do before do - ActiveRecord::Base.connection.execute("ALTER TABLE projects RENAME TO projects_new") - ActiveRecord::Base.connection.execute("CREATE VIEW projects AS SELECT * FROM projects_new") + ActiveRecord::Base.connection.execute("ALTER TABLE projects RENAME TO _test_projects_new") + ActiveRecord::Base.connection.execute("CREATE VIEW projects AS SELECT * FROM _test_projects_new") old_model.reset_column_information ActiveRecord::Base.connection.schema_cache.clear! 
@@ -54,14 +54,14 @@ RSpec.describe Gitlab::Database::SchemaCacheWithRenamedTable do it 'has the same indexes' do indexes_for_old_table = ActiveRecord::Base.connection.schema_cache.indexes('projects') - indexes_for_new_table = ActiveRecord::Base.connection.schema_cache.indexes('projects_new') + indexes_for_new_table = ActiveRecord::Base.connection.schema_cache.indexes('_test_projects_new') expect(indexes_for_old_table).to eq(indexes_for_new_table) end it 'has the same column_hash' do columns_hash_for_old_table = ActiveRecord::Base.connection.schema_cache.columns_hash('projects') - columns_hash_for_new_table = ActiveRecord::Base.connection.schema_cache.columns_hash('projects_new') + columns_hash_for_new_table = ActiveRecord::Base.connection.schema_cache.columns_hash('_test_projects_new') expect(columns_hash_for_old_table).to eq(columns_hash_for_new_table) end diff --git a/spec/lib/gitlab/database/schema_migrations/context_spec.rb b/spec/lib/gitlab/database/schema_migrations/context_spec.rb index 0323fa22b78..07c97ea0ec3 100644 --- a/spec/lib/gitlab/database/schema_migrations/context_spec.rb +++ b/spec/lib/gitlab/database/schema_migrations/context_spec.rb @@ -14,7 +14,7 @@ RSpec.describe Gitlab::Database::SchemaMigrations::Context do end context 'CI database' do - let(:connection_class) { Ci::CiDatabaseRecord } + let(:connection_class) { Ci::ApplicationRecord } it 'returns a directory path that is database specific' do skip_if_multiple_databases_not_setup diff --git a/spec/lib/gitlab/database/shared_model_spec.rb b/spec/lib/gitlab/database/shared_model_spec.rb index 5d616aeb05f..94f2b5a3434 100644 --- a/spec/lib/gitlab/database/shared_model_spec.rb +++ b/spec/lib/gitlab/database/shared_model_spec.rb @@ -27,6 +27,38 @@ RSpec.describe Gitlab::Database::SharedModel do end end + context 'when multiple connection overrides are nested', :aggregate_failures do + let(:second_connection) { double('connection') } + + it 'allows the nesting with the same connection object' do + 
expect_original_connection_around do + described_class.using_connection(new_connection) do + expect(described_class.connection).to be(new_connection) + + described_class.using_connection(new_connection) do + expect(described_class.connection).to be(new_connection) + end + + expect(described_class.connection).to be(new_connection) + end + end + end + + it 'raises an error if the connection is changed' do + expect_original_connection_around do + described_class.using_connection(new_connection) do + expect(described_class.connection).to be(new_connection) + + expect do + described_class.using_connection(second_connection) {} + end.to raise_error(/cannot nest connection overrides/) + + expect(described_class.connection).to be(new_connection) + end + end + end + end + context 'when the block raises an error', :aggregate_failures do it 're-raises the error, removing the overridden connection' do expect_original_connection_around do diff --git a/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb b/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb index 2955c208f16..bbddb5f1af5 100644 --- a/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb +++ b/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb @@ -7,7 +7,7 @@ RSpec.describe Gitlab::Database::UnidirectionalCopyTrigger do let(:table_name) { '_test_table' } let(:connection) { ActiveRecord::Base.connection } - let(:copy_trigger) { described_class.on_table(table_name) } + let(:copy_trigger) { described_class.on_table(table_name, connection: connection) } describe '#name' do context 'when a single column name is given' do diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb index a2e7b6d27b9..5ec7c338a2a 100644 --- a/spec/lib/gitlab/database_spec.rb +++ b/spec/lib/gitlab/database_spec.rb @@ -15,13 +15,6 @@ RSpec.describe Gitlab::Database do end end - describe '.databases' do - it 'stores connections as a HashWithIndifferentAccess' do - 
expect(described_class.databases.has_key?('main')).to be true - expect(described_class.databases.has_key?(:main)).to be true - end - end - describe '.default_pool_size' do before do allow(Gitlab::Runtime).to receive(:max_threads).and_return(7) @@ -112,18 +105,30 @@ RSpec.describe Gitlab::Database do end describe '.check_postgres_version_and_print_warning' do + let(:reflect) { instance_spy(Gitlab::Database::Reflection) } + subject { described_class.check_postgres_version_and_print_warning } + before do + allow(Gitlab::Database::Reflection) + .to receive(:new) + .and_return(reflect) + end + it 'prints a warning if not compliant with minimum postgres version' do - allow(described_class.main).to receive(:postgresql_minimum_supported_version?).and_return(false) + allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(false) - expect(Kernel).to receive(:warn).with(/You are using PostgreSQL/) + expect(Kernel) + .to receive(:warn) + .with(/You are using PostgreSQL/) + .exactly(Gitlab::Database.database_base_models.length) + .times subject end it 'doesnt print a warning if compliant with minimum postgres version' do - allow(described_class.main).to receive(:postgresql_minimum_supported_version?).and_return(true) + allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(true) expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/) @@ -131,7 +136,7 @@ RSpec.describe Gitlab::Database do end it 'doesnt print a warning in Rails runner environment' do - allow(described_class.main).to receive(:postgresql_minimum_supported_version?).and_return(false) + allow(reflect).to receive(:postgresql_minimum_supported_version?).and_return(false) allow(Gitlab::Runtime).to receive(:rails_runner?).and_return(true) expect(Kernel).not_to receive(:warn).with(/You are using PostgreSQL/) @@ -140,13 +145,13 @@ RSpec.describe Gitlab::Database do end it 'ignores ActiveRecord errors' do - allow(described_class.main).to 
receive(:postgresql_minimum_supported_version?).and_raise(ActiveRecord::ActiveRecordError) + allow(reflect).to receive(:postgresql_minimum_supported_version?).and_raise(ActiveRecord::ActiveRecordError) expect { subject }.not_to raise_error end it 'ignores Postgres errors' do - allow(described_class.main).to receive(:postgresql_minimum_supported_version?).and_raise(PG::Error) + allow(reflect).to receive(:postgresql_minimum_supported_version?).and_raise(PG::Error) expect { subject }.not_to raise_error end @@ -205,7 +210,7 @@ RSpec.describe Gitlab::Database do context 'when replicas are configured', :database_replica do it 'returns the name for a replica' do - replica = ActiveRecord::Base.connection.load_balancer.host + replica = ActiveRecord::Base.load_balancer.host expect(described_class.db_config_name(replica)).to eq('main_replica') end diff --git a/spec/lib/gitlab/diff/file_spec.rb b/spec/lib/gitlab/diff/file_spec.rb index 1800d2d6b60..4b437397688 100644 --- a/spec/lib/gitlab/diff/file_spec.rb +++ b/spec/lib/gitlab/diff/file_spec.rb @@ -51,6 +51,48 @@ RSpec.describe Gitlab::Diff::File do project.commit(branch_name).diffs.diff_files.first end + describe 'initialize' do + context 'when file is ipynb with a change after transformation' do + let(:commit) { project.commit("f6b7a707") } + let(:diff) { commit.raw_diffs.first } + let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) } + + context 'and :jupyter_clean_diffs is enabled' do + before do + stub_feature_flags(jupyter_clean_diffs: true) + end + + it 'recreates the diff by transforming the files' do + expect(diff_file.diff.diff).not_to include('"| Fake') + end + end + + context 'but :jupyter_clean_diffs is disabled' do + before do + stub_feature_flags(jupyter_clean_diffs: false) + end + + it 'does not recreate the diff' do + expect(diff_file.diff.diff).to include('"| Fake') + end + end + end + + context 'when file is ipynb, but there only changes that are removed' 
do + let(:commit) { project.commit("2b5ef814") } + let(:diff) { commit.raw_diffs.first } + let(:diff_file) { described_class.new(diff, diff_refs: commit.diff_refs, repository: project.repository) } + + before do + stub_feature_flags(jupyter_clean_diffs: true) + end + + it 'does not recreate the diff' do + expect(diff_file.diff.diff).to include('execution_count') + end + end + end + describe '#diff_lines' do let(:diff_lines) { diff_file.diff_lines } diff --git a/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb b/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb index bdeaabec1f1..b646cf38178 100644 --- a/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb +++ b/spec/lib/gitlab/diff/position_tracer/line_strategy_spec.rb @@ -581,13 +581,16 @@ RSpec.describe Gitlab::Diff::PositionTracer::LineStrategy, :clean_gitlab_redis_c ) end - it "returns the new position but drops line_range information" do + it "returns the new position" do expect_change_position( old_path: file_name, new_path: file_name, old_line: nil, new_line: 2, - line_range: nil + line_range: { + "start_line_code" => 1, + "end_line_code" => 2 + } ) end end diff --git a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb index 8cb1ccc065b..c579027788d 100644 --- a/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb +++ b/spec/lib/gitlab/email/handler/service_desk_handler_spec.rb @@ -11,6 +11,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do end let(:email_raw) { email_fixture('emails/service_desk.eml') } + let(:author_email) { 'jake@adventuretime.ooo' } let_it_be(:group) { create(:group, :private, name: "email") } let(:expected_description) do @@ -45,7 +46,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do receiver.execute new_issue = Issue.last - expect(new_issue.issue_email_participants.first.email).to eq("jake@adventuretime.ooo") + 
expect(new_issue.issue_email_participants.first.email).to eq(author_email) end it 'sends thank you email' do @@ -196,60 +197,123 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do end end - context 'when using service desk key' do - let_it_be(:service_desk_key) { 'mykey' } + context 'when all lines of email are quoted' do + let(:email_raw) { email_fixture('emails/service_desk_all_quoted.eml') } - let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml') } + it 'creates email with correct body' do + receiver.execute + + issue = Issue.last + expect(issue.description).to include('> This is an empty quote') + end + end + + context 'when using custom service desk address' do let(:receiver) { Gitlab::Email::ServiceDeskReceiver.new(email_raw) } before do stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com') end - before_all do - create(:service_desk_setting, project: project, project_key: service_desk_key) - end + context 'when using project key' do + let_it_be(:service_desk_key) { 'mykey' } - it_behaves_like 'a new issue request' + let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml') } + + before_all do + create(:service_desk_setting, project: project, project_key: service_desk_key) + end + + it_behaves_like 'a new issue request' + + context 'when there is no project with the key' do + let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml', key: 'some_key') } + + it 'bounces the email' do + expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound) + end + end + + context 'when the project slug does not match' do + let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml', slug: 'some-slug') } + + it 'bounces the email' do + expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound) + end + end + + context 'when there are multiple projects with same key' do + let_it_be(:project_with_same_key) { 
create(:project, group: group, service_desk_enabled: true) } + + let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml', slug: project_with_same_key.full_path_slug.to_s) } - context 'when there is no project with the key' do - let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml', key: 'some_key') } + before do + create(:service_desk_setting, project: project_with_same_key, project_key: service_desk_key) + end - it 'bounces the email' do - expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound) + it 'process email for project with matching slug' do + expect { receiver.execute }.to change { Issue.count }.by(1) + expect(Issue.last.project).to eq(project_with_same_key) + end end end - context 'when the project slug does not match' do - let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml', slug: 'some-slug') } + context 'when project key is not set' do + let(:email_raw) { email_fixture('emails/service_desk_custom_address_no_key.eml') } - it 'bounces the email' do - expect { receiver.execute }.to raise_error(Gitlab::Email::ProjectNotFound) + before do + stub_service_desk_email_setting(enabled: true, address: 'support+%{key}@example.com') end + + it_behaves_like 'a new issue request' end + end + end - context 'when there are multiple projects with same key' do - let_it_be(:project_with_same_key) { create(:project, group: group, service_desk_enabled: true) } + context 'when rate limiting is in effect', :freeze_time, :clean_gitlab_redis_rate_limiting do + let(:receiver) { Gitlab::Email::Receiver.new(email_raw) } - let(:email_raw) { service_desk_fixture('emails/service_desk_custom_address.eml', slug: project_with_same_key.full_path_slug.to_s) } + subject { 2.times { receiver.execute } } - before do - create(:service_desk_setting, project: project_with_same_key, project_key: service_desk_key) + before do + stub_feature_flags(rate_limited_service_issues_create: true) + 
stub_application_setting(issues_create_limit: 1) + end + + context 'when too many requests are sent by one user' do + it 'raises an error' do + expect { subject }.to raise_error(RateLimitedService::RateLimitedError) + end + + it 'creates 1 issue' do + expect do + subject + rescue RateLimitedService::RateLimitedError + end.to change { Issue.count }.by(1) + end + + context 'when requests are sent by different users' do + let(:email_raw_2) { email_fixture('emails/service_desk_forwarded.eml') } + let(:receiver2) { Gitlab::Email::Receiver.new(email_raw_2) } + + subject do + receiver.execute + receiver2.execute end - it 'process email for project with matching slug' do - expect { receiver.execute }.to change { Issue.count }.by(1) - expect(Issue.last.project).to eq(project_with_same_key) + it 'creates 2 issues' do + expect { subject }.to change { Issue.count }.by(2) end end end - context 'when rate limiting is in effect' do - it 'allows unlimited new issue creation' do - stub_application_setting(issues_create_limit: 1) - setup_attachment + context 'when limit is higher than sent emails' do + before do + stub_application_setting(issues_create_limit: 2) + end - expect { 2.times { receiver.execute } }.to change { Issue.count }.by(2) + it 'creates 2 issues' do + expect { subject }.to change { Issue.count }.by(2) end end end @@ -323,6 +387,7 @@ RSpec.describe Gitlab::Email::Handler::ServiceDeskHandler do end context 'when the email is forwarded through an alias' do + let(:author_email) { 'jake.g@adventuretime.ooo' } let(:email_raw) { email_fixture('emails/service_desk_forwarded.eml') } it_behaves_like 'a new issue request' diff --git a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb index 0a1f04ed793..352eb596cd9 100644 --- a/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb +++ b/spec/lib/gitlab/email/hook/smime_signature_interceptor_spec.rb @@ -36,7 +36,7 @@ RSpec.describe 
Gitlab::Email::Hook::SmimeSignatureInterceptor do end before do - allow(Gitlab::X509::Certificate).to receive_messages(from_files: certificate) + allow(Gitlab::Email::Hook::SmimeSignatureInterceptor).to receive(:certificate).and_return(certificate) Mail.register_interceptor(described_class) mail.deliver_now diff --git a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb index 277f1158f8b..0521123f1ef 100644 --- a/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb +++ b/spec/lib/gitlab/email/message/in_product_marketing/base_spec.rb @@ -82,4 +82,29 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Base do it { is_expected.to include('This is email 1 of 3 in the Create series', Gitlab::Routing.url_helpers.profile_notifications_url) } end end + + describe '#series?' do + using RSpec::Parameterized::TableSyntax + + subject do + test_class = "Gitlab::Email::Message::InProductMarketing::#{track.to_s.classify}".constantize + test_class.new(group: group, user: user, series: series).series? 
+ end + + where(:track, :result) do + :create | true + :team_short | true + :trial_short | true + :admin_verify | true + :verify | true + :trial | true + :team | true + :experience | true + :invite_team | false + end + + with_them do + it { is_expected.to eq result } + end + end end diff --git a/spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb index b742eff3f56..8cd2345822e 100644 --- a/spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb +++ b/spec/lib/gitlab/email/message/in_product_marketing/experience_spec.rb @@ -22,14 +22,36 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Experience do expect(message.cta_text).to be_nil end - describe '#feedback_link' do - let(:member_count) { 2 } + describe 'feedback URL' do + before do + allow(message).to receive(:onboarding_progress).and_return(1) + allow(message).to receive(:show_invite_link).and_return(true) + end + + subject do + message.feedback_link(1) + end + + it { is_expected.to start_with(Gitlab::Saas.com_url) } + + context 'when in development' do + let(:root_url) { 'http://example.com' } + + before do + allow(message).to receive(:root_url).and_return(root_url) + stub_rails_env('development') + end + + it { is_expected.to start_with(root_url) } + end + end + + describe 'feedback URL show_invite_link query param' do let(:user_access) { GroupMember::DEVELOPER } let(:preferred_language) { 'en' } before do allow(message).to receive(:onboarding_progress).and_return(1) - allow(group).to receive(:member_count).and_return(member_count) allow(group).to receive(:max_member_access_for_user).and_return(user_access) allow(user).to receive(:preferred_language).and_return(preferred_language) end @@ -41,12 +63,6 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Experience do it { is_expected.to eq('true') } - context 'with only one member' do - let(:member_count) { 1 } - - it { is_expected.to 
eq('false') } - end - context 'with less than developer access' do let(:user_access) { GroupMember::GUEST } @@ -59,6 +75,41 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing::Experience do it { is_expected.to eq('false') } end end + + describe 'feedback URL show_incentive query param' do + let(:show_invite_link) { true } + let(:member_count) { 2 } + let(:query) do + uri = URI.parse(message.feedback_link(1)) + Rack::Utils.parse_query(uri.query).with_indifferent_access + end + + before do + allow(message).to receive(:onboarding_progress).and_return(1) + allow(message).to receive(:show_invite_link).and_return(show_invite_link) + allow(group).to receive(:member_count).and_return(member_count) + end + + subject { query[:show_incentive] } + + it { is_expected.to eq('true') } + + context 'with only one member' do + let(:member_count) { 1 } + + it "is not present" do + expect(query).not_to have_key(:show_incentive) + end + end + + context 'show_invite_link is false' do + let(:show_invite_link) { false } + + it "is not present" do + expect(query).not_to have_key(:show_incentive) + end + end + end end end end diff --git a/spec/lib/gitlab/email/message/in_product_marketing/invite_team_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing/invite_team_spec.rb new file mode 100644 index 00000000000..8319560f594 --- /dev/null +++ b/spec/lib/gitlab/email/message/in_product_marketing/invite_team_spec.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Email::Message::InProductMarketing::InviteTeam do + let_it_be(:group) { build(:group) } + let_it_be(:user) { build(:user) } + + let(:series) { 0 } + + subject(:message) { described_class.new(group: group, user: user, series: series) } + + describe 'initialize' do + context 'when series is valid' do + it 'does not raise error' do + expect { subject }.not_to raise_error(ArgumentError) + end + end + + context 'when series is invalid' do + let(:series) { 1 } + + it 
'raises error' do + expect { subject }.to raise_error(ArgumentError) + end + end + end + + it 'contains the correct message', :aggregate_failures do + expect(message.subject_line).to eq 'Invite your teammates to GitLab' + expect(message.tagline).to be_empty + expect(message.title).to eq 'GitLab is better with teammates to help out!' + expect(message.subtitle).to be_empty + expect(message.body_line1).to eq 'Invite your teammates today and build better code together. You can even assign tasks to new teammates such as setting up CI/CD, to help get projects up and running.' + expect(message.body_line2).to be_empty + expect(message.cta_text).to eq 'Invite your teammates to help' + expect(message.logo_path).to eq 'mailers/in_product_marketing/team-0.png' + end +end diff --git a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb index 9ffc4a340a3..594df7440bb 100644 --- a/spec/lib/gitlab/email/message/in_product_marketing_spec.rb +++ b/spec/lib/gitlab/email/message/in_product_marketing_spec.rb @@ -10,10 +10,15 @@ RSpec.describe Gitlab::Email::Message::InProductMarketing do context 'when track exists' do where(:track, :expected_class) do - :create | described_class::Create - :verify | described_class::Verify - :trial | described_class::Trial - :team | described_class::Team + :create | described_class::Create + :team_short | described_class::TeamShort + :trial_short | described_class::TrialShort + :admin_verify | described_class::AdminVerify + :verify | described_class::Verify + :trial | described_class::Trial + :team | described_class::Team + :experience | described_class::Experience + :invite_team | described_class::InviteTeam end with_them do diff --git a/spec/lib/gitlab/email/reply_parser_spec.rb b/spec/lib/gitlab/email/reply_parser_spec.rb index 3b01b568fb4..c0d177aff4d 100644 --- a/spec/lib/gitlab/email/reply_parser_spec.rb +++ b/spec/lib/gitlab/email/reply_parser_spec.rb @@ -21,6 +21,30 @@ 
RSpec.describe Gitlab::Email::ReplyParser do expect(test_parse_body(fixture_file("emails/no_content_reply.eml"))).to eq("") end + context 'when allow_only_quotes is true' do + it "returns quoted text from email" do + text = test_parse_body(fixture_file("emails/no_content_reply.eml"), allow_only_quotes: true) + + expect(text).to eq( + <<-BODY.strip_heredoc.chomp + > + > + > + > eviltrout posted in 'Adventure Time Sux' on Discourse Meta: + > + > --- + > hey guys everyone knows adventure time sucks! + > + > --- + > Please visit this link to respond: http://localhost:3000/t/adventure-time-sux/1234/3 + > + > To unsubscribe from these emails, visit your [user preferences](http://localhost:3000/user_preferences). + > + BODY + ) + end + end + it "properly renders plaintext-only email" do expect(test_parse_body(fixture_file("emails/plaintext_only.eml"))) .to eq( diff --git a/spec/lib/gitlab/emoji_spec.rb b/spec/lib/gitlab/emoji_spec.rb index 8f855489c12..0db3b5f3b11 100644 --- a/spec/lib/gitlab/emoji_spec.rb +++ b/spec/lib/gitlab/emoji_spec.rb @@ -3,90 +3,6 @@ require 'spec_helper' RSpec.describe Gitlab::Emoji do - let_it_be(:emojis) { Gemojione.index.instance_variable_get(:@emoji_by_name) } - let_it_be(:emojis_by_moji) { Gemojione.index.instance_variable_get(:@emoji_by_moji) } - let_it_be(:emoji_unicode_versions_by_name) { Gitlab::Json.parse(File.read(Rails.root.join('fixtures', 'emojis', 'emoji-unicode-version-map.json'))) } - let_it_be(:emojis_aliases) { Gitlab::Json.parse(File.read(Rails.root.join('fixtures', 'emojis', 'aliases.json'))) } - - describe '.emojis' do - it 'returns emojis' do - current_emojis = described_class.emojis - - expect(current_emojis).to eq(emojis) - end - end - - describe '.emojis_by_moji' do - it 'return emojis by moji' do - current_emojis_by_moji = described_class.emojis_by_moji - - expect(current_emojis_by_moji).to eq(emojis_by_moji) - end - end - - describe '.emojis_unicodes' do - it 'returns emoji unicodes' do - emoji_keys = 
described_class.emojis_unicodes - - expect(emoji_keys).to eq(emojis_by_moji.keys) - end - end - - describe '.emojis_names' do - it 'returns emoji names' do - emoji_names = described_class.emojis_names - - expect(emoji_names).to eq(emojis.keys) - end - end - - describe '.emojis_aliases' do - it 'returns emoji aliases' do - emoji_aliases = described_class.emojis_aliases - - expect(emoji_aliases).to eq(emojis_aliases) - end - end - - describe '.emoji_filename' do - it 'returns emoji filename' do - # "100" => {"unicode"=>"1F4AF"...} - emoji_filename = described_class.emoji_filename('100') - - expect(emoji_filename).to eq(emojis['100']['unicode']) - end - end - - describe '.emoji_unicode_filename' do - it 'returns emoji unicode filename' do - emoji_unicode_filename = described_class.emoji_unicode_filename('💯') - - expect(emoji_unicode_filename).to eq(emojis_by_moji['💯']['unicode']) - end - end - - describe '.emoji_unicode_version' do - it 'returns emoji unicode version by name' do - emoji_unicode_version = described_class.emoji_unicode_version('100') - - expect(emoji_unicode_version).to eq(emoji_unicode_versions_by_name['100']) - end - end - - describe '.normalize_emoji_name' do - it 'returns same name if not found in aliases' do - emoji_name = described_class.normalize_emoji_name('random') - - expect(emoji_name).to eq('random') - end - - it 'returns name if name found in aliases' do - emoji_name = described_class.normalize_emoji_name('small_airplane') - - expect(emoji_name).to eq(emojis_aliases['small_airplane']) - end - end - describe '.emoji_image_tag' do it 'returns emoji image tag' do emoji_image = described_class.emoji_image_tag('emoji_one', 'src_url') @@ -104,29 +20,17 @@ RSpec.describe Gitlab::Emoji do end end - describe '.emoji_exists?' 
do - it 'returns true if the name exists' do - emoji_exists = described_class.emoji_exists?('100') - - expect(emoji_exists).to be_truthy - end - - it 'returns false if the name does not exist' do - emoji_exists = described_class.emoji_exists?('random') - - expect(emoji_exists).to be_falsey - end - end - describe '.gl_emoji_tag' do it 'returns gl emoji tag if emoji is found' do - gl_tag = described_class.gl_emoji_tag('small_airplane') + emoji = TanukiEmoji.find_by_alpha_code('small_airplane') + gl_tag = described_class.gl_emoji_tag(emoji) expect(gl_tag).to eq('<gl-emoji title="small airplane" data-name="airplane_small" data-unicode-version="7.0">🛩</gl-emoji>') end - it 'returns nil if emoji name is not found' do - gl_tag = described_class.gl_emoji_tag('random') + it 'returns nil if emoji is not found' do + emoji = TanukiEmoji.find_by_alpha_code('random') + gl_tag = described_class.gl_emoji_tag(emoji) expect(gl_tag).to be_nil end diff --git a/spec/lib/gitlab/etag_caching/middleware_spec.rb b/spec/lib/gitlab/etag_caching/middleware_spec.rb index c4da89e5f5c..982c0d911bc 100644 --- a/spec/lib/gitlab/etag_caching/middleware_spec.rb +++ b/spec/lib/gitlab/etag_caching/middleware_spec.rb @@ -174,7 +174,7 @@ RSpec.describe Gitlab::EtagCaching::Middleware, :clean_gitlab_redis_shared_state it "pushes route's feature category to the context" do expect(Gitlab::ApplicationContext).to receive(:push).with( - feature_category: 'issue_tracking' + feature_category: 'team_planning' ) _, _, _ = middleware.call(build_request(path, if_none_match)) diff --git a/spec/lib/gitlab/git/commit_spec.rb b/spec/lib/gitlab/git/commit_spec.rb index f4dba5e8d58..11510daf9c0 100644 --- a/spec/lib/gitlab/git/commit_spec.rb +++ b/spec/lib/gitlab/git/commit_spec.rb @@ -715,6 +715,14 @@ RSpec.describe Gitlab::Git::Commit, :seed_helper do it { is_expected.not_to include("feature") } end + describe '#first_ref_by_oid' do + let(:commit) { described_class.find(repository, 'master') } + + subject { 
commit.first_ref_by_oid(repository) } + + it { is_expected.to eq("master") } + end + describe '.get_message' do let(:commit_ids) { %w[6d394385cf567f80a8fd85055db1ab4c5295806f cfe32cf61b73a0d5e9f13e774abde7ff789b1660] } diff --git a/spec/lib/gitlab/git/object_pool_spec.rb b/spec/lib/gitlab/git/object_pool_spec.rb index e1873c6ddb5..91960ebbede 100644 --- a/spec/lib/gitlab/git/object_pool_spec.rb +++ b/spec/lib/gitlab/git/object_pool_spec.rb @@ -112,7 +112,7 @@ RSpec.describe Gitlab::Git::ObjectPool do subject.fetch - expect(subject.repository.commit_count('refs/remotes/origin/master')).to eq(commit_count) + expect(subject.repository.commit_count('refs/remotes/origin/heads/master')).to eq(commit_count) expect(subject.repository.commit(new_commit_id).id).to eq(new_commit_id) end end diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb index c7b68ff3e28..f1b6a59abf9 100644 --- a/spec/lib/gitlab/git/repository_spec.rb +++ b/spec/lib/gitlab/git/repository_spec.rb @@ -125,7 +125,22 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do it 'gets tags from GitalyClient' do expect_next_instance_of(Gitlab::GitalyClient::RefService) do |service| - expect(service).to receive(:tags).with(sort_by: 'name_asc') + expect(service).to receive(:tags).with(sort_by: 'name_asc', pagination_params: nil) + end + + subject + end + end + + context 'with pagination option' do + subject { repository.tags(pagination_params: { limit: 5, page_token: 'refs/tags/v1.0.0' }) } + + it 'gets tags from GitalyClient' do + expect_next_instance_of(Gitlab::GitalyClient::RefService) do |service| + expect(service).to receive(:tags).with( + sort_by: nil, + pagination_params: { limit: 5, page_token: 'refs/tags/v1.0.0' } + ) end subject @@ -1888,6 +1903,44 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do end end + describe '#list_refs' do + it 'returns a list of branches with their head commit' do + refs = repository.list_refs + reference = refs.first + + 
expect(refs).to be_an(Enumerable) + expect(reference).to be_a(Gitaly::ListRefsResponse::Reference) + expect(reference.name).to be_a(String) + expect(reference.target).to be_a(String) + end + end + + describe '#refs_by_oid' do + it 'returns a list of refs from a OID' do + refs = repository.refs_by_oid(oid: repository.commit.id) + + expect(refs).to be_an(Array) + expect(refs).to include(Gitlab::Git::BRANCH_REF_PREFIX + repository.root_ref) + end + + it 'returns a single ref from a OID' do + refs = repository.refs_by_oid(oid: repository.commit.id, limit: 1) + + expect(refs).to be_an(Array) + expect(refs).to eq([Gitlab::Git::BRANCH_REF_PREFIX + repository.root_ref]) + end + + it 'returns empty for unknown ID' do + expect(repository.refs_by_oid(oid: Gitlab::Git::BLANK_SHA, limit: 0)).to eq([]) + end + + it 'returns nil for an empty repo' do + project = create(:project) + + expect(project.repository.refs_by_oid(oid: SeedRepo::Commit::ID, limit: 0)).to be_nil + end + end + describe '#set_full_path' do before do repository_rugged.config["gitlab.fullpath"] = repository_path diff --git a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb index 554a91f2bc5..d8e397dd6f3 100644 --- a/spec/lib/gitlab/gitaly_client/commit_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/commit_service_spec.rb @@ -112,15 +112,38 @@ RSpec.describe Gitlab::GitalyClient::CommitService do let(:from) { 'master' } let(:to) { Gitlab::Git::EMPTY_TREE_ID } - it 'sends an RPC request' do - request = Gitaly::CommitsBetweenRequest.new( - repository: repository_message, from: from, to: to - ) + context 'with between_commits_via_list_commits enabled' do + before do + stub_feature_flags(between_commits_via_list_commits: true) + end - expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:commits_between) - .with(request, kind_of(Hash)).and_return([]) + it 'sends an RPC request' do + request = Gitaly::ListCommitsRequest.new( + repository: 
repository_message, revisions: ["^" + from, to], reverse: true + ) + + expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:list_commits) + .with(request, kind_of(Hash)).and_return([]) - described_class.new(repository).between(from, to) + described_class.new(repository).between(from, to) + end + end + + context 'with between_commits_via_list_commits disabled' do + before do + stub_feature_flags(between_commits_via_list_commits: false) + end + + it 'sends an RPC request' do + request = Gitaly::CommitsBetweenRequest.new( + repository: repository_message, from: from, to: to + ) + + expect_any_instance_of(Gitaly::CommitService::Stub).to receive(:commits_between) + .with(request, kind_of(Hash)).and_return([]) + + described_class.new(repository).between(from, to) + end end end diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb index d308612ef31..2e37c98a591 100644 --- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb +++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb @@ -190,6 +190,22 @@ RSpec.describe Gitlab::GitalyClient::RefService do client.tags(sort_by: 'name_asc') end end + + context 'with pagination option' do + it 'sends a correct find_all_tags message' do + expected_pagination = Gitaly::PaginationParameter.new( + limit: 5, + page_token: 'refs/tags/v1.0.0' + ) + + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:find_all_tags) + .with(gitaly_request_with_params(pagination_params: expected_pagination), kind_of(Hash)) + .and_return([]) + + client.tags(pagination_params: { limit: 5, page_token: 'refs/tags/v1.0.0' }) + end + end end describe '#branch_names_contains_sha' do @@ -252,6 +268,26 @@ RSpec.describe Gitlab::GitalyClient::RefService do end end + describe '#list_refs' do + it 'sends a list_refs message' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:list_refs) + .with(gitaly_request_with_params(patterns: ['refs/heads/']), kind_of(Hash)) + 
.and_call_original + + client.list_refs + end + + it 'accepts a patterns argument' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:list_refs) + .with(gitaly_request_with_params(patterns: ['refs/tags/']), kind_of(Hash)) + .and_call_original + + client.list_refs([Gitlab::Git::TAG_REF_PREFIX]) + end + end + describe '#pack_refs' do it 'sends a pack_refs message' do expect_any_instance_of(Gitaly::RefService::Stub) @@ -262,4 +298,19 @@ RSpec.describe Gitlab::GitalyClient::RefService do client.pack_refs end end + + describe '#find_refs_by_oid' do + let(:oid) { project.repository.commit.id } + + it 'sends a find_refs_by_oid message' do + expect_any_instance_of(Gitaly::RefService::Stub) + .to receive(:find_refs_by_oid) + .with(gitaly_request_with_params(sort_field: 'refname', oid: oid, limit: 1), kind_of(Hash)) + .and_call_original + + refs = client.find_refs_by_oid(oid: oid, limit: 1) + + expect(refs.to_a).to eq([Gitlab::Git::BRANCH_REF_PREFIX + project.repository.root_ref]) + end + end end diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb index 16f75691288..ba4ea1069d8 100644 --- a/spec/lib/gitlab/gitaly_client_spec.rb +++ b/spec/lib/gitlab/gitaly_client_spec.rb @@ -5,14 +5,6 @@ require 'spec_helper' # We stub Gitaly in `spec/support/gitaly.rb` for other tests. We don't want # those stubs while testing the GitalyClient itself. 
RSpec.describe Gitlab::GitalyClient do - let(:sample_cert) { Rails.root.join('spec/fixtures/clusters/sample_cert.pem').to_s } - - before do - allow(described_class) - .to receive(:stub_cert_paths) - .and_return([sample_cert]) - end - def stub_repos_storages(address) allow(Gitlab.config.repositories).to receive(:storages).and_return({ 'default' => { 'gitaly_address' => address } @@ -142,21 +134,6 @@ RSpec.describe Gitlab::GitalyClient do end end - describe '.stub_certs' do - it 'skips certificates if OpenSSLError is raised and report it' do - expect(Gitlab::ErrorTracking) - .to receive(:track_and_raise_for_dev_exception) - .with( - a_kind_of(OpenSSL::X509::CertificateError), - cert_file: a_kind_of(String)).at_least(:once) - - expect(OpenSSL::X509::Certificate) - .to receive(:new) - .and_raise(OpenSSL::X509::CertificateError).at_least(:once) - - expect(described_class.stub_certs).to be_a(String) - end - end describe '.stub_creds' do it 'returns :this_channel_is_insecure if unix' do address = 'unix:/tmp/gitaly.sock' diff --git a/spec/lib/gitlab/github_import/bulk_importing_spec.rb b/spec/lib/gitlab/github_import/bulk_importing_spec.rb index 6c94973b5a8..e170496ff7b 100644 --- a/spec/lib/gitlab/github_import/bulk_importing_spec.rb +++ b/spec/lib/gitlab/github_import/bulk_importing_spec.rb @@ -116,13 +116,13 @@ RSpec.describe Gitlab::GithubImport::BulkImporting do value: 5 ) - expect(Gitlab::Database.main) - .to receive(:bulk_insert) + expect(ApplicationRecord) + .to receive(:legacy_bulk_insert) .ordered .with('kittens', rows.first(5)) - expect(Gitlab::Database.main) - .to receive(:bulk_insert) + expect(ApplicationRecord) + .to receive(:legacy_bulk_insert) .ordered .with('kittens', rows.last(5)) diff --git a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb index 3dc15c7c059..0448ada6bca 100644 --- a/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb +++ 
b/spec/lib/gitlab/github_import/importer/diff_note_importer_spec.rb @@ -2,156 +2,226 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter do - let(:project) { create(:project) } - let(:client) { double(:client) } - let(:user) { create(:user) } - let(:created_at) { Time.new(2017, 1, 1, 12, 00) } - let(:updated_at) { Time.new(2017, 1, 1, 12, 15) } +RSpec.describe Gitlab::GithubImport::Importer::DiffNoteImporter, :aggregate_failures do + let_it_be(:project) { create(:project, :repository) } + let_it_be(:user) { create(:user) } - let(:hunk) do - '@@ -1 +1 @@ + let(:client) { double(:client) } + let(:discussion_id) { 'b0fa404393eeebb4e82becb8104f238812bb1fe6' } + let(:created_at) { Time.new(2017, 1, 1, 12, 00).utc } + let(:updated_at) { Time.new(2017, 1, 1, 12, 15).utc } + let(:note_body) { 'Hello' } + let(:file_path) { 'files/ruby/popen.rb' } + + let(:diff_hunk) do + '@@ -14 +14 @@ -Hello +Hello world' end - let(:note) do + let(:note_representation) do Gitlab::GithubImport::Representation::DiffNote.new( noteable_type: 'MergeRequest', noteable_id: 1, commit_id: '123abc', original_commit_id: 'original123abc', - file_path: 'README.md', - diff_hunk: hunk, - author: Gitlab::GithubImport::Representation::User - .new(id: user.id, login: user.username), - note: 'Hello', + file_path: file_path, + author: Gitlab::GithubImport::Representation::User.new(id: user.id, login: user.username), + note: note_body, created_at: created_at, updated_at: updated_at, - github_id: 1 + start_line: nil, + end_line: 15, + github_id: 1, + diff_hunk: diff_hunk, + side: 'RIGHT' ) end - let(:importer) { described_class.new(note, project, client) } + subject(:importer) { described_class.new(note_representation, project, client) } + + shared_examples 'diff notes without suggestion' do + it 'imports the note as legacy diff note' do + stub_user_finder(user.id, true) + + expect { subject.execute } + .to change(LegacyDiffNote, :count) + .by(1) + + note = 
project.notes.diff_notes.take + expect(note).to be_valid + expect(note.author_id).to eq(user.id) + expect(note.commit_id).to eq('original123abc') + expect(note.created_at).to eq(created_at) + expect(note.diff).to be_an_instance_of(Gitlab::Git::Diff) + expect(note.discussion_id).to eq(discussion_id) + expect(note.line_code).to eq(note_representation.line_code) + expect(note.note).to eq('Hello') + expect(note.noteable_id).to eq(merge_request.id) + expect(note.noteable_type).to eq('MergeRequest') + expect(note.project_id).to eq(project.id) + expect(note.st_diff).to eq(note_representation.diff_hash) + expect(note.system).to eq(false) + expect(note.type).to eq('LegacyDiffNote') + expect(note.updated_at).to eq(updated_at) + end + + it 'adds a "created by:" note when the author cannot be found' do + stub_user_finder(project.creator_id, false) + + expect { subject.execute } + .to change(LegacyDiffNote, :count) + .by(1) + + note = project.notes.diff_notes.take + expect(note).to be_valid + expect(note.author_id).to eq(project.creator_id) + expect(note.note).to eq("*Created by: #{user.username}*\n\nHello") + end + + it 'does not import the note when a foreign key error is raised' do + stub_user_finder(project.creator_id, false) + + expect(ApplicationRecord) + .to receive(:legacy_bulk_insert) + .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key') + + expect { subject.execute } + .not_to change(LegacyDiffNote, :count) + end + end describe '#execute' do context 'when the merge request no longer exists' do it 'does not import anything' do - expect(Gitlab::Database.main).not_to receive(:bulk_insert) + expect(ApplicationRecord).not_to receive(:legacy_bulk_insert) - importer.execute + expect { subject.execute } + .to not_change(DiffNote, :count) + .and not_change(LegacyDiffNote, :count) end end context 'when the merge request exists' do - let!(:merge_request) do + let_it_be(:merge_request) do create(:merge_request, source_project: project, target_project: project) end 
before do - allow(importer) - .to receive(:find_merge_request_id) - .and_return(merge_request.id) + expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |finder| + expect(finder) + .to receive(:database_id) + .and_return(merge_request.id) + end + + expect(Discussion) + .to receive(:discussion_id) + .and_return(discussion_id) end - it 'imports the note' do - allow(importer.user_finder) - .to receive(:author_id_for) - .and_return([user.id, true]) - - expect(Gitlab::Database.main) - .to receive(:bulk_insert) - .with( - LegacyDiffNote.table_name, - [ - { - discussion_id: anything, - noteable_type: 'MergeRequest', - noteable_id: merge_request.id, - project_id: project.id, - author_id: user.id, - note: 'Hello', - system: false, - commit_id: 'original123abc', - line_code: note.line_code, - type: 'LegacyDiffNote', - created_at: created_at, - updated_at: updated_at, - st_diff: note.diff_hash.to_yaml - } - ] - ) - .and_call_original - - importer.execute + context 'when github_importer_use_diff_note_with_suggestions is disabled' do + before do + stub_feature_flags(github_importer_use_diff_note_with_suggestions: false) + end + + it_behaves_like 'diff notes without suggestion' + + context 'when the note has suggestions' do + let(:note_body) do + <<~EOB + Suggestion: + ```suggestion + what do you think to do it like this + ``` + EOB + end + + it 'imports the note' do + stub_user_finder(user.id, true) + + expect { subject.execute } + .to change(LegacyDiffNote, :count) + .and not_change(DiffNote, :count) + + note = project.notes.diff_notes.take + expect(note).to be_valid + expect(note.note) + .to eq <<~NOTE + Suggestion: + ```suggestion:-0+0 + what do you think to do it like this + ``` + NOTE + end + end end - it 'imports the note when the author could not be found' do - allow(importer.user_finder) - .to receive(:author_id_for) - .and_return([project.creator_id, false]) - - expect(Gitlab::Database.main) - .to receive(:bulk_insert) - .with( - LegacyDiffNote.table_name, 
- [ - { - discussion_id: anything, - noteable_type: 'MergeRequest', - noteable_id: merge_request.id, - project_id: project.id, - author_id: project.creator_id, - note: "*Created by: #{user.username}*\n\nHello", - system: false, - commit_id: 'original123abc', - line_code: note.line_code, - type: 'LegacyDiffNote', - created_at: created_at, - updated_at: updated_at, - st_diff: note.diff_hash.to_yaml - } - ] - ) - .and_call_original - - importer.execute - end - - it 'produces a valid LegacyDiffNote' do - allow(importer.user_finder) - .to receive(:author_id_for) - .and_return([user.id, true]) - - importer.execute - - note = project.notes.diff_notes.take - - expect(note).to be_valid - expect(note.diff).to be_an_instance_of(Gitlab::Git::Diff) - end - - it 'does not import the note when a foreign key error is raised' do - allow(importer.user_finder) - .to receive(:author_id_for) - .and_return([project.creator_id, false]) - - expect(Gitlab::Database.main) - .to receive(:bulk_insert) - .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key') - - expect { importer.execute }.not_to raise_error + context 'when github_importer_use_diff_note_with_suggestions is enabled' do + before do + stub_feature_flags(github_importer_use_diff_note_with_suggestions: true) + end + + it_behaves_like 'diff notes without suggestion' + + context 'when the note has suggestions' do + let(:note_body) do + <<~EOB + Suggestion: + ```suggestion + what do you think to do it like this + ``` + EOB + end + + it 'imports the note as diff note' do + stub_user_finder(user.id, true) + + expect { subject.execute } + .to change(DiffNote, :count) + .by(1) + + note = project.notes.diff_notes.take + expect(note).to be_valid + expect(note.noteable_type).to eq('MergeRequest') + expect(note.noteable_id).to eq(merge_request.id) + expect(note.project_id).to eq(project.id) + expect(note.author_id).to eq(user.id) + expect(note.system).to eq(false) + expect(note.discussion_id).to eq(discussion_id) + 
expect(note.commit_id).to eq('original123abc') + expect(note.line_code).to eq(note_representation.line_code) + expect(note.type).to eq('DiffNote') + expect(note.created_at).to eq(created_at) + expect(note.updated_at).to eq(updated_at) + expect(note.position.to_h).to eq({ + base_sha: merge_request.diffs.diff_refs.base_sha, + head_sha: merge_request.diffs.diff_refs.head_sha, + start_sha: merge_request.diffs.diff_refs.start_sha, + new_line: 15, + old_line: nil, + new_path: file_path, + old_path: file_path, + position_type: 'text', + line_range: nil + }) + expect(note.note) + .to eq <<~NOTE + Suggestion: + ```suggestion:-0+0 + what do you think to do it like this + ``` + NOTE + end + end end end end - describe '#find_merge_request_id' do - it 'returns a merge request ID' do - expect_next_instance_of(Gitlab::GithubImport::IssuableFinder) do |instance| - expect(instance).to receive(:database_id).and_return(10) - end - - expect(importer.find_merge_request_id).to eq(10) + def stub_user_finder(user, found) + expect_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder| + expect(finder) + .to receive(:author_id_for) + .and_return([user, found]) end end end diff --git a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb index be4fc3cbf16..1c7b35ed928 100644 --- a/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/diff_notes_importer_spec.rb @@ -19,7 +19,9 @@ RSpec.describe Gitlab::GithubImport::Importer::DiffNotesImporter do updated_at: Time.zone.now, line: 23, start_line: nil, + in_reply_to_id: nil, id: 1, + side: 'RIGHT', body: <<~BODY Hello World diff --git a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb index 0926000428c..4287c32b947 100644 --- a/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb +++ 
b/spec/lib/gitlab/github_import/importer/issue_importer_spec.rb @@ -190,8 +190,8 @@ RSpec.describe Gitlab::GithubImport::Importer::IssueImporter, :clean_gitlab_redi .with(issue.assignees[1]) .and_return(5) - expect(Gitlab::Database.main) - .to receive(:bulk_insert) + expect(ApplicationRecord) + .to receive(:legacy_bulk_insert) .with( IssueAssignee.table_name, [{ issue_id: 1, user_id: 4 }, { issue_id: 1, user_id: 5 }] diff --git a/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb index 241a0fef600..e68849755b2 100644 --- a/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/label_links_importer_spec.rb @@ -39,8 +39,8 @@ RSpec.describe Gitlab::GithubImport::Importer::LabelLinksImporter do .and_return(1) freeze_time do - expect(Gitlab::Database.main) - .to receive(:bulk_insert) + expect(ApplicationRecord) + .to receive(:legacy_bulk_insert) .with( LabelLink.table_name, [ @@ -64,8 +64,8 @@ RSpec.describe Gitlab::GithubImport::Importer::LabelLinksImporter do .with('bug') .and_return(nil) - expect(Gitlab::Database.main) - .to receive(:bulk_insert) + expect(ApplicationRecord) + .to receive(:legacy_bulk_insert) .with(LabelLink.table_name, []) importer.create_labels diff --git a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb index 820f46c7286..96d8acbd3de 100644 --- a/spec/lib/gitlab/github_import/importer/note_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/note_importer_spec.rb @@ -41,8 +41,8 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do .with(github_note) .and_return([user.id, true]) - expect(Gitlab::Database.main) - .to receive(:bulk_insert) + expect(ApplicationRecord) + .to receive(:legacy_bulk_insert) .with( Note.table_name, [ @@ -71,8 +71,8 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do 
.with(github_note) .and_return([project.creator_id, false]) - expect(Gitlab::Database.main) - .to receive(:bulk_insert) + expect(ApplicationRecord) + .to receive(:legacy_bulk_insert) .with( Note.table_name, [ @@ -115,7 +115,7 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do context 'when the noteable does not exist' do it 'does not import the note' do - expect(Gitlab::Database.main).not_to receive(:bulk_insert) + expect(ApplicationRecord).not_to receive(:legacy_bulk_insert) importer.execute end @@ -134,8 +134,8 @@ RSpec.describe Gitlab::GithubImport::Importer::NoteImporter do .with(github_note) .and_return([user.id, true]) - expect(Gitlab::Database.main) - .to receive(:bulk_insert) + expect(ApplicationRecord) + .to receive(:legacy_bulk_insert) .and_raise(ActiveRecord::InvalidForeignKey, 'invalid foreign key') expect { importer.execute }.not_to raise_error diff --git a/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb index 4a47d103cde..b6c162aafa9 100644 --- a/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb +++ b/spec/lib/gitlab/github_import/importer/pull_requests_merged_by_importer_spec.rb @@ -4,7 +4,8 @@ require 'spec_helper' RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do let(:client) { double } - let(:project) { create(:project, import_source: 'http://somegithub.com') } + + let_it_be(:project) { create(:project, import_source: 'http://somegithub.com') } subject { described_class.new(project, client) } @@ -27,14 +28,11 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do end describe '#each_object_to_import', :clean_gitlab_redis_cache do - it 'fetchs the merged pull requests data' do - create( - :merged_merge_request, - iid: 999, - source_project: project, - target_project: project - ) + let!(:merge_request) do + create(:merged_merge_request, 
iid: 999, source_project: project, target_project: project) + end + it 'fetches the merged pull requests data' do pull_request = double allow(client) @@ -48,5 +46,16 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestsMergedByImporter do subject.each_object_to_import {} end + + it 'skips cached merge requests' do + Gitlab::Cache::Import::Caching.set_add( + "github-importer/already-imported/#{project.id}/pull_requests_merged_by", + merge_request.id + ) + + expect(client).not_to receive(:pull_request) + + subject.each_object_to_import {} + end end end diff --git a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb index 81722c0eba7..63834cfdb94 100644 --- a/spec/lib/gitlab/github_import/representation/diff_note_spec.rb +++ b/spec/lib/gitlab/github_import/representation/diff_note_spec.rb @@ -2,23 +2,44 @@ require 'spec_helper' -RSpec.describe Gitlab::GithubImport::Representation::DiffNote do +RSpec.describe Gitlab::GithubImport::Representation::DiffNote, :clean_gitlab_redis_shared_state do let(:hunk) do '@@ -1 +1 @@ -Hello +Hello world' end + let(:merge_request) do + double( + :merge_request, + id: 54, + diff_refs: double( + :refs, + base_sha: 'base', + start_sha: 'start', + head_sha: 'head' + ) + ) + end + + let(:project) { double(:project, id: 836) } + let(:note_id) { 1 } + let(:in_reply_to_id) { nil } + let(:start_line) { nil } + let(:end_line) { 23 } + let(:note_body) { 'Hello world' } + let(:user_data) { { 'id' => 4, 'login' => 'alice' } } + let(:side) { 'RIGHT' } let(:created_at) { Time.new(2017, 1, 1, 12, 00) } let(:updated_at) { Time.new(2017, 1, 1, 12, 15) } - shared_examples 'a DiffNote' do + shared_examples 'a DiffNote representation' do it 'returns an instance of DiffNote' do expect(note).to be_an_instance_of(described_class) end context 'the returned DiffNote' do - it 'includes the number of the note' do + it 'includes the number of the merge request' do 
expect(note.noteable_id).to eq(42) end @@ -30,18 +51,6 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do expect(note.commit_id).to eq('123abc') end - it 'includes the user details' do - expect(note.author) - .to be_an_instance_of(Gitlab::GithubImport::Representation::User) - - expect(note.author.id).to eq(4) - expect(note.author.login).to eq('alice') - end - - it 'includes the note body' do - expect(note.note).to eq('Hello world') - end - it 'includes the created timestamp' do expect(note.created_at).to eq(created_at) end @@ -51,209 +60,250 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNote do end it 'includes the GitHub ID' do - expect(note.note_id).to eq(1) + expect(note.note_id).to eq(note_id) end it 'returns the noteable type' do expect(note.noteable_type).to eq('MergeRequest') end - end - end - - describe '.from_api_response' do - let(:response) do - double( - :response, - html_url: 'https://github.com/foo/bar/pull/42', - path: 'README.md', - commit_id: '123abc', - original_commit_id: 'original123abc', - diff_hunk: hunk, - user: double(:user, id: 4, login: 'alice'), - body: 'Hello world', - created_at: created_at, - updated_at: updated_at, - line: 23, - start_line: nil, - id: 1 - ) - end - - it_behaves_like 'a DiffNote' do - let(:note) { described_class.from_api_response(response) } - end - - it 'does not set the user if the response did not include a user' do - allow(response) - .to receive(:user) - .and_return(nil) - - note = described_class.from_api_response(response) - - expect(note.author).to be_nil - end - - it 'formats a suggestion in the note body' do - allow(response) - .to receive(:body) - .and_return <<~BODY - ```suggestion - Hello World - ``` - BODY - - note = described_class.from_api_response(response) - - expect(note.note).to eq <<~BODY - ```suggestion:-0+0 - Hello World - ``` - BODY - end - end - - describe '.from_json_hash' do - let(:hash) do - { - 'noteable_type' => 'MergeRequest', - 'noteable_id' => 42, - 
'file_path' => 'README.md', - 'commit_id' => '123abc', - 'original_commit_id' => 'original123abc', - 'diff_hunk' => hunk, - 'author' => { 'id' => 4, 'login' => 'alice' }, - 'note' => 'Hello world', - 'created_at' => created_at.to_s, - 'updated_at' => updated_at.to_s, - 'note_id' => 1 - } - end - it_behaves_like 'a DiffNote' do - let(:note) { described_class.from_json_hash(hash) } - end - - it 'does not convert the author if it was not specified' do - hash.delete('author') - - note = described_class.from_json_hash(hash) + describe '#diff_hash' do + it 'returns a Hash containing the diff details' do + expect(note.diff_hash).to eq( + diff: hunk, + new_path: 'README.md', + old_path: 'README.md', + a_mode: '100644', + b_mode: '100644', + new_file: false + ) + end + end - expect(note.author).to be_nil - end + describe '#diff_position' do + before do + note.merge_request = double( + :merge_request, + diff_refs: double( + :refs, + base_sha: 'base', + start_sha: 'start', + head_sha: 'head' + ) + ) + end + + context 'when the diff is an addition' do + it 'returns a Gitlab::Diff::Position' do + expect(note.diff_position.to_h).to eq( + base_sha: 'base', + head_sha: 'head', + line_range: nil, + new_line: 23, + new_path: 'README.md', + old_line: nil, + old_path: 'README.md', + position_type: 'text', + start_sha: 'start' + ) + end + end + + context 'when the diff is an deletion' do + let(:side) { 'LEFT' } + + it 'returns a Gitlab::Diff::Position' do + expect(note.diff_position.to_h).to eq( + base_sha: 'base', + head_sha: 'head', + line_range: nil, + old_line: 23, + new_path: 'README.md', + new_line: nil, + old_path: 'README.md', + position_type: 'text', + start_sha: 'start' + ) + end + end + end - it 'formats a suggestion in the note body' do - hash['note'] = <<~BODY - ```suggestion - Hello World - ``` - BODY + describe '#discussion_id' do + before do + note.project = project + note.merge_request = merge_request + end + + context 'when the note is a reply to a discussion' do + it 
'uses the cached value as the discussion_id only when responding an existing discussion' do + expect(Discussion) + .to receive(:discussion_id) + .and_return('FIRST_DISCUSSION_ID', 'SECOND_DISCUSSION_ID') + + # Creates the first discussion id and caches its value + expect(note.discussion_id) + .to eq('FIRST_DISCUSSION_ID') + + reply_note = described_class.from_json_hash( + 'note_id' => note.note_id + 1, + 'in_reply_to_id' => note.note_id + ) + reply_note.project = project + reply_note.merge_request = merge_request + + # Reading from the cached value + expect(reply_note.discussion_id) + .to eq('FIRST_DISCUSSION_ID') + + new_discussion_note = described_class.from_json_hash( + 'note_id' => note.note_id + 2, + 'in_reply_to_id' => nil + ) + new_discussion_note.project = project + new_discussion_note.merge_request = merge_request + + # Because it's a new discussion, it must not use the cached value + expect(new_discussion_note.discussion_id) + .to eq('SECOND_DISCUSSION_ID') + end + end + end - note = described_class.from_json_hash(hash) + describe '#github_identifiers' do + it 'returns a hash with needed identifiers' do + expect(note.github_identifiers).to eq( + noteable_id: 42, + noteable_type: 'MergeRequest', + note_id: 1 + ) + end + end - expect(note.note).to eq <<~BODY - ```suggestion:-0+0 - Hello World - ``` - BODY - end - end + describe '#line_code' do + it 'generates the proper line code' do + note = described_class.new(diff_hunk: hunk, file_path: 'README.md') - describe '#line_code' do - it 'returns a String' do - note = described_class.new(diff_hunk: hunk, file_path: 'README.md') + expect(note.line_code).to eq('8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_2_2') + end + end - expect(note.line_code).to be_an_instance_of(String) + describe '#note and #contains_suggestion?' 
do + it 'includes the note body' do + expect(note.note).to eq('Hello world') + expect(note.contains_suggestion?).to eq(false) + end + + context 'when the note have a suggestion' do + let(:note_body) do + <<~BODY + ```suggestion + Hello World + ``` + BODY + end + + it 'returns the suggestion formatted in the note' do + expect(note.note).to eq <<~BODY + ```suggestion:-0+0 + Hello World + ``` + BODY + expect(note.contains_suggestion?).to eq(true) + end + end + + context 'when the note have a multiline suggestion' do + let(:start_line) { 20 } + let(:end_line) { 23 } + let(:note_body) do + <<~BODY + ```suggestion + Hello World + ``` + BODY + end + + it 'returns the multi-line suggestion formatted in the note' do + expect(note.note).to eq <<~BODY + ```suggestion:-3+0 + Hello World + ``` + BODY + expect(note.contains_suggestion?).to eq(true) + end + end + + describe '#author' do + it 'includes the user details' do + expect(note.author).to be_an_instance_of( + Gitlab::GithubImport::Representation::User + ) + + expect(note.author.id).to eq(4) + expect(note.author.login).to eq('alice') + end + + context 'when the author is empty' do + let(:user_data) { nil } + + it 'does not set the user if the response did not include a user' do + expect(note.author).to be_nil + end + end + end + end end end - describe '#diff_hash' do - it 'returns a Hash containing the diff details' do - note = described_class.from_json_hash( - 'noteable_type' => 'MergeRequest', - 'noteable_id' => 42, - 'file_path' => 'README.md', - 'commit_id' => '123abc', - 'original_commit_id' => 'original123abc', - 'diff_hunk' => hunk, - 'author' => { 'id' => 4, 'login' => 'alice' }, - 'note' => 'Hello world', - 'created_at' => created_at.to_s, - 'updated_at' => updated_at.to_s, - 'note_id' => 1 - ) - - expect(note.diff_hash).to eq( - diff: hunk, - new_path: 'README.md', - old_path: 'README.md', - a_mode: '100644', - b_mode: '100644', - new_file: false - ) - end - end + describe '.from_api_response' do + 
it_behaves_like 'a DiffNote representation' do + let(:response) do + double( + :response, + id: note_id, + html_url: 'https://github.com/foo/bar/pull/42', + path: 'README.md', + commit_id: '123abc', + original_commit_id: 'original123abc', + side: side, + user: user_data && double(:user, user_data), + diff_hunk: hunk, + body: note_body, + created_at: created_at, + updated_at: updated_at, + line: end_line, + start_line: start_line, + in_reply_to_id: in_reply_to_id + ) + end - describe '#github_identifiers' do - it 'returns a hash with needed identifiers' do - github_identifiers = { - noteable_id: 42, - noteable_type: 'MergeRequest', - note_id: 1 - } - other_attributes = { something_else: '_something_else_' } - note = described_class.new(github_identifiers.merge(other_attributes)) - - expect(note.github_identifiers).to eq(github_identifiers) + subject(:note) { described_class.from_api_response(response) } end end - describe '#note' do - it 'returns the given note' do - hash = { - 'note': 'simple text' - } - - note = described_class.new(hash) - - expect(note.note).to eq 'simple text' - end - - it 'returns the suggestion formatted in the note' do - hash = { - 'note': <<~BODY - ```suggestion - Hello World - ``` - BODY - } - - note = described_class.new(hash) - - expect(note.note).to eq <<~BODY - ```suggestion:-0+0 - Hello World - ``` - BODY - end + describe '.from_json_hash' do + it_behaves_like 'a DiffNote representation' do + let(:hash) do + { + 'note_id' => note_id, + 'noteable_type' => 'MergeRequest', + 'noteable_id' => 42, + 'file_path' => 'README.md', + 'commit_id' => '123abc', + 'original_commit_id' => 'original123abc', + 'side' => side, + 'author' => user_data, + 'diff_hunk' => hunk, + 'note' => note_body, + 'created_at' => created_at.to_s, + 'updated_at' => updated_at.to_s, + 'end_line' => end_line, + 'start_line' => start_line, + 'in_reply_to_id' => in_reply_to_id + } + end - it 'returns the multi-line suggestion formatted in the note' do - hash = { - 
'start_line': 20, - 'end_line': 23, - 'note': <<~BODY - ```suggestion - Hello World - ``` - BODY - } - - note = described_class.new(hash) - - expect(note.note).to eq <<~BODY - ```suggestion:-3+0 - Hello World - ``` - BODY + subject(:note) { described_class.from_json_hash(hash) } end end end diff --git a/spec/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter_spec.rb b/spec/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter_spec.rb index 2ffd5f50d3b..bcb8575bdbf 100644 --- a/spec/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter_spec.rb +++ b/spec/lib/gitlab/github_import/representation/diff_notes/suggestion_formatter_spec.rb @@ -9,13 +9,19 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNotes::SuggestionFormat ``` BODY - expect(described_class.formatted_note_for(note: note)).to eq(note) + note_formatter = described_class.new(note: note) + + expect(note_formatter.formatted_note).to eq(note) + expect(note_formatter.contains_suggestion?).to eq(false) end it 'handles nil value for note' do note = nil - expect(described_class.formatted_note_for(note: note)).to eq(note) + note_formatter = described_class.new(note: note) + + expect(note_formatter.formatted_note).to eq(note) + expect(note_formatter.contains_suggestion?).to eq(false) end it 'does not allow over 3 leading spaces for valid suggestion' do @@ -26,7 +32,10 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNotes::SuggestionFormat ``` BODY - expect(described_class.formatted_note_for(note: note)).to eq(note) + note_formatter = described_class.new(note: note) + + expect(note_formatter.formatted_note).to eq(note) + expect(note_formatter.contains_suggestion?).to eq(false) end it 'allows up to 3 leading spaces' do @@ -44,7 +53,10 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNotes::SuggestionFormat ``` BODY - expect(described_class.formatted_note_for(note: note)).to eq(expected) + note_formatter = 
described_class.new(note: note) + + expect(note_formatter.formatted_note).to eq(expected) + expect(note_formatter.contains_suggestion?).to eq(true) end it 'does nothing when there is any text without space after the suggestion tag' do @@ -53,7 +65,10 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNotes::SuggestionFormat ``` BODY - expect(described_class.formatted_note_for(note: note)).to eq(note) + note_formatter = described_class.new(note: note) + + expect(note_formatter.formatted_note).to eq(note) + expect(note_formatter.contains_suggestion?).to eq(false) end it 'formats single-line suggestions' do @@ -71,7 +86,10 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNotes::SuggestionFormat ``` BODY - expect(described_class.formatted_note_for(note: note)).to eq(expected) + note_formatter = described_class.new(note: note) + + expect(note_formatter.formatted_note).to eq(expected) + expect(note_formatter.contains_suggestion?).to eq(true) end it 'ignores text after suggestion tag on the same line' do @@ -89,7 +107,10 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNotes::SuggestionFormat ``` BODY - expect(described_class.formatted_note_for(note: note)).to eq(expected) + note_formatter = described_class.new(note: note) + + expect(note_formatter.formatted_note).to eq(expected) + expect(note_formatter.contains_suggestion?).to eq(true) end it 'formats multiple single-line suggestions' do @@ -115,7 +136,10 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNotes::SuggestionFormat ``` BODY - expect(described_class.formatted_note_for(note: note)).to eq(expected) + note_formatter = described_class.new(note: note) + + expect(note_formatter.formatted_note).to eq(expected) + expect(note_formatter.contains_suggestion?).to eq(true) end it 'formats multi-line suggestions' do @@ -133,7 +157,10 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNotes::SuggestionFormat ``` BODY - expect(described_class.formatted_note_for(note: note, 
start_line: 6, end_line: 8)).to eq(expected) + note_formatter = described_class.new(note: note, start_line: 6, end_line: 8) + + expect(note_formatter.formatted_note).to eq(expected) + expect(note_formatter.contains_suggestion?).to eq(true) end it 'formats multiple multi-line suggestions' do @@ -159,6 +186,9 @@ RSpec.describe Gitlab::GithubImport::Representation::DiffNotes::SuggestionFormat ``` BODY - expect(described_class.formatted_note_for(note: note, start_line: 6, end_line: 8)).to eq(expected) + note_formatter = described_class.new(note: note, start_line: 6, end_line: 8) + + expect(note_formatter.formatted_note).to eq(expected) + expect(note_formatter.contains_suggestion?).to eq(true) end end diff --git a/spec/lib/gitlab/gpg/commit_spec.rb b/spec/lib/gitlab/gpg/commit_spec.rb index 55102554508..20d5972bd88 100644 --- a/spec/lib/gitlab/gpg/commit_spec.rb +++ b/spec/lib/gitlab/gpg/commit_spec.rb @@ -136,7 +136,7 @@ RSpec.describe Gitlab::Gpg::Commit do it 'returns a valid signature' do verified_signature = double('verified-signature', fingerprint: GpgHelpers::User1.fingerprint, valid?: true) allow(GPGME::Crypto).to receive(:new).and_return(crypto) - allow(crypto).to receive(:verify).and_return(verified_signature) + allow(crypto).to receive(:verify).and_yield(verified_signature) signature = described_class.new(commit).signature @@ -178,7 +178,7 @@ RSpec.describe Gitlab::Gpg::Commit do keyid = GpgHelpers::User1.fingerprint.last(16) verified_signature = double('verified-signature', fingerprint: keyid, valid?: true) allow(GPGME::Crypto).to receive(:new).and_return(crypto) - allow(crypto).to receive(:verify).and_return(verified_signature) + allow(crypto).to receive(:verify).and_yield(verified_signature) signature = described_class.new(commit).signature @@ -194,6 +194,71 @@ RSpec.describe Gitlab::Gpg::Commit do end end + context 'commit with multiple signatures' do + let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: 
GpgHelpers::User1.emails.first } + + let!(:user) { create(:user, email: GpgHelpers::User1.emails.first) } + + let!(:gpg_key) do + create :gpg_key, key: GpgHelpers::User1.public_key, user: user + end + + let!(:crypto) { instance_double(GPGME::Crypto) } + + before do + fake_signature = [ + GpgHelpers::User1.signed_commit_signature, + GpgHelpers::User1.signed_commit_base_data + ] + + allow(Gitlab::Git::Commit).to receive(:extract_signature_lazily) + .with(Gitlab::Git::Repository, commit_sha) + .and_return(fake_signature) + end + + it 'returns an invalid signatures error' do + verified_signature = double('verified-signature', fingerprint: GpgHelpers::User1.fingerprint, valid?: true) + allow(GPGME::Crypto).to receive(:new).and_return(crypto) + allow(crypto).to receive(:verify).and_yield(verified_signature).and_yield(verified_signature) + + signature = described_class.new(commit).signature + + expect(signature).to have_attributes( + commit_sha: commit_sha, + project: project, + gpg_key: gpg_key, + gpg_key_primary_keyid: GpgHelpers::User1.primary_keyid, + gpg_key_user_name: GpgHelpers::User1.names.first, + gpg_key_user_email: GpgHelpers::User1.emails.first, + verification_status: 'multiple_signatures' + ) + end + + context 'when feature flag is disabled' do + before do + stub_feature_flags(multiple_gpg_signatures: false) + end + + it 'returns an valid signature' do + verified_signature = double('verified-signature', fingerprint: GpgHelpers::User1.fingerprint, valid?: true) + allow(GPGME::Crypto).to receive(:new).and_return(crypto) + allow(crypto).to receive(:verify).and_yield(verified_signature).and_yield(verified_signature) + + signature = described_class.new(commit).signature + + expect(signature).to have_attributes( + commit_sha: commit_sha, + project: project, + gpg_key: gpg_key, + gpg_key_primary_keyid: GpgHelpers::User1.primary_keyid, + gpg_key_user_name: GpgHelpers::User1.names.first, + gpg_key_user_email: GpgHelpers::User1.emails.first, + verification_status: 
'verified' + ) + end + end + end + context 'commit signed with a subkey' do let!(:commit) { create :commit, project: project, sha: commit_sha, committer_email: GpgHelpers::User3.emails.first } diff --git a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb index c1516a48b80..771f6e1ec46 100644 --- a/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb +++ b/spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb @@ -140,6 +140,8 @@ RSpec.describe Gitlab::Gpg::InvalidGpgSignatureUpdater do key: GpgHelpers::User1.public_key, user: user + user.reload # necessary to reload the association with gpg_keys + expect(invalid_gpg_signature.reload.verification_status).to eq 'unverified_key' # InvalidGpgSignatureUpdater is called by the after_update hook diff --git a/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb index 641fb27a071..ef4bc0ca104 100644 --- a/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb +++ b/spec/lib/gitlab/grape_logging/loggers/perf_logger_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::GrapeLogging::Loggers::PerfLogger do - let(:mock_request) { OpenStruct.new(env: {}) } + let(:mock_request) { double('env', env: {}) } describe ".parameters" do subject { described_class.new.parameters(mock_request, nil) } diff --git a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb index 9538c4bae2b..4cd9f9dfad0 100644 --- a/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb +++ b/spec/lib/gitlab/grape_logging/loggers/queue_duration_logger_spec.rb @@ -9,7 +9,7 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do let(:start_time) { Time.new(2018, 01, 01) } describe 'when no proxy time is available' do - let(:mock_request) { OpenStruct.new(env: {}) } + let(:mock_request) { 
double('env', env: {}) } it 'returns an empty hash' do expect(subject.parameters(mock_request, nil)).to eq({}) @@ -18,7 +18,7 @@ RSpec.describe Gitlab::GrapeLogging::Loggers::QueueDurationLogger do describe 'when a proxy time is available' do let(:mock_request) do - OpenStruct.new( + double('env', env: { 'HTTP_GITLAB_WORKHORSE_PROXY_START' => (start_time - 1.hour).to_i * (10**9) } diff --git a/spec/lib/gitlab/grape_logging/loggers/urgency_logger_spec.rb b/spec/lib/gitlab/grape_logging/loggers/urgency_logger_spec.rb new file mode 100644 index 00000000000..464534f0271 --- /dev/null +++ b/spec/lib/gitlab/grape_logging/loggers/urgency_logger_spec.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::GrapeLogging::Loggers::UrgencyLogger do + def endpoint(options, namespace: '') + Struct.new(:options, :namespace).new(options, namespace) + end + + let(:api_class) do + Class.new(API::Base) do + namespace 'testing' do + # rubocop:disable Rails/HttpPositionalArguments + # This is not the get that performs a request, but the one from Grape + get 'test', urgency: :high do + {} + end + # rubocop:enable Rails/HttpPositionalArguments + end + end + end + + describe ".parameters" do + where(:request_env, :expected_parameters) do + [ + [{}, {}], + [{ 'api.endpoint' => endpoint({}) }, {}], + [{ 'api.endpoint' => endpoint({ for: 'something weird' }) }, {}], + [ + { 'api.endpoint' => endpoint({ for: api_class, path: [] }) }, + { request_urgency: :default, target_duration_s: 1 } + ], + [ + { 'api.endpoint' => endpoint({ for: api_class, path: ['test'] }, namespace: '/testing') }, + { request_urgency: :high, target_duration_s: 0.25 } + ] + ] + end + + with_them do + let(:request) { double('request', env: request_env) } + + subject { described_class.new.parameters(request, nil) } + + it { is_expected.to eq(expected_parameters) } + end + end +end diff --git a/spec/lib/gitlab/graphql/known_operations_spec.rb 
b/spec/lib/gitlab/graphql/known_operations_spec.rb new file mode 100644 index 00000000000..411c0876f82 --- /dev/null +++ b/spec/lib/gitlab/graphql/known_operations_spec.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' +require 'rspec-parameterized' +require "support/graphql/fake_query_type" + +RSpec.describe Gitlab::Graphql::KnownOperations do + using RSpec::Parameterized::TableSyntax + + # Include duplicated operation names to test that we are unique-ifying them + let(:fake_operations) { %w(foo foo bar bar) } + let(:fake_schema) do + Class.new(GraphQL::Schema) do + query Graphql::FakeQueryType + end + end + + subject { described_class.new(fake_operations) } + + describe "#from_query" do + where(:query_string, :expected) do + "query { helloWorld }" | described_class::ANONYMOUS + "query fuzzyyy { helloWorld }" | described_class::UNKNOWN + "query foo { helloWorld }" | described_class::Operation.new("foo") + end + + with_them do + it "returns known operation name from GraphQL Query" do + query = ::GraphQL::Query.new(fake_schema, query_string) + + expect(subject.from_query(query)).to eq(expected) + end + end + end + + describe "#operations" do + it "returns array of known operations" do + expect(subject.operations.map(&:name)).to match_array(%w(anonymous unknown foo bar)) + end + end + + describe "Operation#to_caller_id" do + where(:query_string, :expected) do + "query { helloWorld }" | "graphql:#{described_class::ANONYMOUS.name}" + "query foo { helloWorld }" | "graphql:foo" + end + + with_them do + it "formats operation name for caller_id metric property" do + query = ::GraphQL::Query.new(fake_schema, query_string) + + expect(subject.from_query(query).to_caller_id).to eq(expected) + end + end + end + + describe "Opeartion#query_urgency" do + it "returns the associated query urgency" do + query = ::GraphQL::Query.new(fake_schema, "query foo { helloWorld }") + + expect(subject.from_query(query).query_urgency).to 
equal(::Gitlab::EndpointAttributes::DEFAULT_URGENCY) + end + end + + describe ".default" do + it "returns a memoization of values from webpack", :aggregate_failures do + # .default could have been referenced in another spec, so we need to clean it up here + described_class.instance_variable_set(:@default, nil) + + expect(Gitlab::Webpack::GraphqlKnownOperations).to receive(:load).once.and_return(fake_operations) + + 2.times { described_class.default } + + # Uses reference equality to verify memoization + expect(described_class.default).to equal(described_class.default) + expect(described_class.default).to be_a(described_class) + expect(described_class.default.operations.map(&:name)).to include(*fake_operations) + end + end +end diff --git a/spec/lib/gitlab/graphql/pagination/connections_spec.rb b/spec/lib/gitlab/graphql/pagination/connections_spec.rb index f3f59113c81..97389b6250e 100644 --- a/spec/lib/gitlab/graphql/pagination/connections_spec.rb +++ b/spec/lib/gitlab/graphql/pagination/connections_spec.rb @@ -8,7 +8,7 @@ RSpec.describe ::Gitlab::Graphql::Pagination::Connections do before(:all) do ActiveRecord::Schema.define do - create_table :testing_pagination_nodes, force: true do |t| + create_table :_test_testing_pagination_nodes, force: true do |t| t.integer :value, null: false end end @@ -16,13 +16,13 @@ RSpec.describe ::Gitlab::Graphql::Pagination::Connections do after(:all) do ActiveRecord::Schema.define do - drop_table :testing_pagination_nodes, force: true + drop_table :_test_testing_pagination_nodes, force: true end end let_it_be(:node_model) do Class.new(ActiveRecord::Base) do - self.table_name = 'testing_pagination_nodes' + self.table_name = '_test_testing_pagination_nodes' end end diff --git a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb index fc723138d88..dee8f9e3c64 100644 --- a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb +++ 
b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb @@ -18,12 +18,6 @@ RSpec.describe Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer do GRAPHQL end - describe 'variables' do - subject { initial_value.fetch(:variables) } - - it { is_expected.to eq('{:body=>"[FILTERED]"}') } - end - describe '#final_value' do let(:monotonic_time_before) { 42 } let(:monotonic_time_after) { 500 } @@ -42,7 +36,14 @@ RSpec.describe Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer do it 'inserts duration in seconds to memo and sets request store' do expect { final_value }.to change { memo[:duration_s] }.to(monotonic_time_duration) - .and change { RequestStore.store[:graphql_logs] }.to([memo]) + .and change { RequestStore.store[:graphql_logs] }.to([{ + complexity: 4, + depth: 2, + operation_name: query.operation_name, + used_deprecated_fields: [], + used_fields: [], + variables: { body: "[FILTERED]" }.to_s + }]) end end end diff --git a/spec/lib/gitlab/graphql/tracers/application_context_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/application_context_tracer_spec.rb new file mode 100644 index 00000000000..6eff816b95a --- /dev/null +++ b/spec/lib/gitlab/graphql/tracers/application_context_tracer_spec.rb @@ -0,0 +1,43 @@ +# frozen_string_literal: true +require "fast_spec_helper" +require "support/graphql/fake_tracer" +require "support/graphql/fake_query_type" + +RSpec.describe Gitlab::Graphql::Tracers::ApplicationContextTracer do + let(:tracer_spy) { spy('tracer_spy') } + let(:default_known_operations) { ::Gitlab::Graphql::KnownOperations.new(['fooOperation']) } + let(:dummy_schema) do + schema = Class.new(GraphQL::Schema) do + use Gitlab::Graphql::Tracers::ApplicationContextTracer + + query Graphql::FakeQueryType + end + + fake_tracer = Graphql::FakeTracer.new(lambda do |key, *args| + tracer_spy.trace(key, Gitlab::ApplicationContext.current) + end) + + schema.tracer(fake_tracer) + + schema + end + + before do + allow(::Gitlab::Graphql::KnownOperations).to 
receive(:default).and_return(default_known_operations) + end + + it "sets application context during execute_query and cleans up afterwards", :aggregate_failures do + dummy_schema.execute("query fooOperation { helloWorld }") + + # "parse" is just an arbitrary trace event that isn't setting caller_id + expect(tracer_spy).to have_received(:trace).with("parse", hash_excluding("meta.caller_id")) + expect(tracer_spy).to have_received(:trace).with("execute_query", hash_including("meta.caller_id" => "graphql:fooOperation")).once + expect(Gitlab::ApplicationContext.current).not_to include("meta.caller_id") + end + + it "sets caller_id when operation is not known" do + dummy_schema.execute("query fuzz { helloWorld }") + + expect(tracer_spy).to have_received(:trace).with("execute_query", hash_including("meta.caller_id" => "graphql:unknown")).once + end +end diff --git a/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb new file mode 100644 index 00000000000..d83ac4dabc5 --- /dev/null +++ b/spec/lib/gitlab/graphql/tracers/logger_tracer_spec.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true +require "fast_spec_helper" +require "support/graphql/fake_query_type" + +RSpec.describe Gitlab::Graphql::Tracers::LoggerTracer do + let(:dummy_schema) do + Class.new(GraphQL::Schema) do + # LoggerTracer depends on TimerTracer + use Gitlab::Graphql::Tracers::LoggerTracer + use Gitlab::Graphql::Tracers::TimerTracer + + query_analyzer Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer.new + + query Graphql::FakeQueryType + end + end + + around do |example| + Gitlab::ApplicationContext.with_context(caller_id: 'caller_a', feature_category: 'feature_a') do + example.run + end + end + + it "logs every query", :aggregate_failures do + variables = { name: "Ada Lovelace" } + query_string = 'query fooOperation($name: String) { helloWorld(message: $name) }' + + # Build an actual query so we don't have to hardocde the "fingerprint" calculations 
+ query = GraphQL::Query.new(dummy_schema, query_string, variables: variables) + + expect(::Gitlab::GraphqlLogger).to receive(:info).with({ + "correlation_id" => anything, + "meta.caller_id" => "caller_a", + "meta.feature_category" => "feature_a", + "query_analysis.duration_s" => kind_of(Numeric), + "query_analysis.complexity" => 1, + "query_analysis.depth" => 1, + "query_analysis.used_deprecated_fields" => [], + "query_analysis.used_fields" => ["FakeQuery.helloWorld"], + duration_s: be > 0, + is_mutation: false, + operation_fingerprint: query.operation_fingerprint, + operation_name: 'fooOperation', + query_fingerprint: query.fingerprint, + query_string: query_string, + trace_type: "execute_query", + variables: variables.to_s + }) + + dummy_schema.execute(query_string, variables: variables) + end +end diff --git a/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb new file mode 100644 index 00000000000..ff6a76aa319 --- /dev/null +++ b/spec/lib/gitlab/graphql/tracers/metrics_tracer_spec.rb @@ -0,0 +1,60 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' +require 'rspec-parameterized' +require "support/graphql/fake_query_type" + +RSpec.describe Gitlab::Graphql::Tracers::MetricsTracer do + using RSpec::Parameterized::TableSyntax + + let(:default_known_operations) { ::Gitlab::Graphql::KnownOperations.new(%w(lorem foo bar)) } + + let(:fake_schema) do + Class.new(GraphQL::Schema) do + use Gitlab::Graphql::Tracers::ApplicationContextTracer + use Gitlab::Graphql::Tracers::MetricsTracer + use Gitlab::Graphql::Tracers::TimerTracer + + query Graphql::FakeQueryType + end + end + + around do |example| + ::Gitlab::ApplicationContext.with_context(feature_category: 'test_feature_category') do + example.run + end + end + + before do + allow(::Gitlab::Graphql::KnownOperations).to receive(:default).and_return(default_known_operations) + end + + describe 'when used as tracer and query is executed' do + 
where(:duration, :expected_success) do + 0.1 | true + 0.1 + ::Gitlab::EndpointAttributes::DEFAULT_URGENCY.duration | false + end + + with_them do + it 'increments sli' do + # Trigger initialization + fake_schema + + # setup timer + current_time = 0 + allow(Gitlab::Metrics::System).to receive(:monotonic_time) { current_time += duration } + + expect(Gitlab::Metrics::RailsSlis.graphql_query_apdex).to receive(:increment).with( + labels: { + endpoint_id: 'graphql:lorem', + feature_category: 'test_feature_category', + query_urgency: ::Gitlab::EndpointAttributes::DEFAULT_URGENCY.name + }, + success: expected_success + ) + + fake_schema.execute("query lorem { helloWorld }") + end + end + end +end diff --git a/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb b/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb new file mode 100644 index 00000000000..7f837e28772 --- /dev/null +++ b/spec/lib/gitlab/graphql/tracers/timer_tracer_spec.rb @@ -0,0 +1,44 @@ +# frozen_string_literal: true +require "fast_spec_helper" +require "support/graphql/fake_tracer" +require "support/graphql/fake_query_type" + +RSpec.describe Gitlab::Graphql::Tracers::TimerTracer do + let(:expected_duration) { 5 } + let(:tracer_spy) { spy('tracer_spy') } + let(:dummy_schema) do + schema = Class.new(GraphQL::Schema) do + use Gitlab::Graphql::Tracers::TimerTracer + + query Graphql::FakeQueryType + end + + schema.tracer(Graphql::FakeTracer.new(lambda { |*args| tracer_spy.trace(*args) })) + + schema + end + + before do + current_time = 0 + allow(Gitlab::Metrics::System).to receive(:monotonic_time) do + current_time += expected_duration + end + end + + it "adds duration_s to the trace metadata", :aggregate_failures do + query_string = "query fooOperation { helloWorld }" + + dummy_schema.execute(query_string) + + # "parse" and "execute_query" are just arbitrary trace events + expect(tracer_spy).to have_received(:trace).with("parse", { + duration_s: expected_duration, + query_string: query_string + }) + 
expect(tracer_spy).to have_received(:trace).with("execute_query", { + # greater than expected duration because other calls made to `.monotonic_time` are outside our control + duration_s: be >= expected_duration, + query: instance_of(GraphQL::Query) + }) + end +end diff --git a/spec/lib/gitlab/health_checks/redis/redis_check_spec.rb b/spec/lib/gitlab/health_checks/redis/redis_check_spec.rb index 43e890a6c4f..145d573b6de 100644 --- a/spec/lib/gitlab/health_checks/redis/redis_check_spec.rb +++ b/spec/lib/gitlab/health_checks/redis/redis_check_spec.rb @@ -4,5 +4,5 @@ require 'spec_helper' require_relative '../simple_check_shared' RSpec.describe Gitlab::HealthChecks::Redis::RedisCheck do - include_examples 'simple_check', 'redis_ping', 'Redis', 'PONG' + include_examples 'simple_check', 'redis_ping', 'Redis', true end diff --git a/spec/lib/gitlab/import/database_helpers_spec.rb b/spec/lib/gitlab/import/database_helpers_spec.rb index 079faed2518..05d1c0ae078 100644 --- a/spec/lib/gitlab/import/database_helpers_spec.rb +++ b/spec/lib/gitlab/import/database_helpers_spec.rb @@ -16,8 +16,8 @@ RSpec.describe Gitlab::Import::DatabaseHelpers do let(:project) { create(:project) } it 'returns the ID returned by the query' do - expect(Gitlab::Database.main) - .to receive(:bulk_insert) + expect(ApplicationRecord) + .to receive(:legacy_bulk_insert) .with(Issue.table_name, [attributes], return_ids: true) .and_return([10]) diff --git a/spec/lib/gitlab/import/metrics_spec.rb b/spec/lib/gitlab/import/metrics_spec.rb index 035294a620f..9b8b58d00f3 100644 --- a/spec/lib/gitlab/import/metrics_spec.rb +++ b/spec/lib/gitlab/import/metrics_spec.rb @@ -94,20 +94,6 @@ RSpec.describe Gitlab::Import::Metrics, :aggregate_failures do expect(histogram).to have_received(:observe).with({ importer: :test_importer }, anything) end end - - context 'when project is a github import' do - before do - project.import_type = 'github' - end - - it 'emits importer metrics' do - expect(subject).to 
receive(:track_usage_event).with(:github_import_project_success, project.id) - - subject.track_finished_import - - expect(histogram).to have_received(:observe).with({ project: project.full_path }, anything) - end - end end describe '#issues_counter' do diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml index 10f0e687077..b474f5825fd 100644 --- a/spec/lib/gitlab/import_export/all_models.yml +++ b/spec/lib/gitlab/import_export/all_models.yml @@ -60,6 +60,7 @@ issues: - incident_management_issuable_escalation_status - pending_escalations - customer_relations_contacts +- issue_customer_relations_contacts work_item_type: - issues events: @@ -132,6 +133,7 @@ project_members: - user - source - project +- member_task merge_requests: - status_check_responses - subscriptions @@ -382,6 +384,7 @@ project: - emails_on_push_integration - pipelines_email_integration - mattermost_slash_commands_integration +- shimo_integration - slack_slash_commands_integration - irker_integration - packagist_integration diff --git a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb index 2b974f8985d..8ae387d95e3 100644 --- a/spec/lib/gitlab/import_export/attributes_permitter_spec.rb +++ b/spec/lib/gitlab/import_export/attributes_permitter_spec.rb @@ -80,25 +80,66 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter do let(:attributes_permitter) { described_class.new } - where(:relation_name, :permitted_attributes_defined) do - :user | false - :author | false - :ci_cd_settings | true - :metrics_setting | true - :project_badges | true - :pipeline_schedules | true - :error_tracking_setting | true - :auto_devops | true - :boards | true - :custom_attributes | true - :labels | true - :protected_branches | true - :protected_tags | true - :create_access_levels | true - :merge_access_levels | true - :push_access_levels | true - :releases | true - :links | true + 
where(:relation_name, :permitted_attributes_defined ) do + :user | true + :author | false + :ci_cd_settings | true + :metrics_setting | true + :project_badges | true + :pipeline_schedules | true + :error_tracking_setting | true + :auto_devops | true + :boards | true + :custom_attributes | true + :label | true + :labels | true + :protected_branches | true + :protected_tags | true + :create_access_levels | true + :merge_access_levels | true + :push_access_levels | true + :releases | true + :links | true + :priorities | true + :milestone | true + :milestones | true + :snippets | true + :project_members | true + :merge_request | true + :merge_requests | true + :award_emoji | true + :commit_author | true + :committer | true + :events | true + :label_links | true + :merge_request_diff | true + :merge_request_diff_commits | true + :merge_request_diff_files | true + :metrics | true + :notes | true + :push_event_payload | true + :resource_label_events | true + :suggestions | true + :system_note_metadata | true + :timelogs | true + :container_expiration_policy | true + :project_feature | true + :prometheus_metrics | true + :service_desk_setting | true + :external_pull_request | true + :external_pull_requests | true + :statuses | true + :ci_pipelines | true + :stages | true + :actions | true + :design | true + :designs | true + :design_versions | true + :issue_assignees | true + :sentry_issue | true + :zoom_meetings | true + :issues | true + :group_members | true end with_them do @@ -109,9 +150,11 @@ RSpec.describe Gitlab::ImportExport::AttributesPermitter do describe 'included_attributes for Project' do subject { described_class.new } + additional_attributes = { user: %w[id] } + Gitlab::ImportExport::Config.new.to_h[:included_attributes].each do |relation_sym, permitted_attributes| context "for #{relation_sym}" do - it_behaves_like 'a permitted attribute', relation_sym, permitted_attributes + it_behaves_like 'a permitted attribute', relation_sym, permitted_attributes, 
additional_attributes[relation_sym] end end end diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb index fc08a13a8bd..d5f31f235f5 100644 --- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb +++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb @@ -207,9 +207,9 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer do context 'relation ordering' do it 'orders exported pipelines by primary key' do - expected_order = project.ci_pipelines.reorder(:id).ids + expected_order = project.ci_pipelines.reorder(:id).pluck(:sha) - expect(subject['ci_pipelines'].pluck('id')).to eq(expected_order) + expect(subject['ci_pipelines'].pluck('sha')).to eq(expected_order) end end diff --git a/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb new file mode 100644 index 00000000000..473dbf5ecc5 --- /dev/null +++ b/spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb @@ -0,0 +1,88 @@ +# frozen_string_literal: true + +# This spec is a lightweight version of: +# * project/tree_restorer_spec.rb +# +# In depth testing is being done in the above specs. +# This spec tests that restore project works +# but does not have 100% relation coverage. 
+ +require 'spec_helper' + +RSpec.describe Gitlab::ImportExport::Group::RelationTreeRestorer do + let_it_be(:group) { create(:group) } + let_it_be(:importable) { create(:group, parent: group) } + + include_context 'relation tree restorer shared context' do + let(:importable_name) { nil } + end + + let(:path) { 'spec/fixtures/lib/gitlab/import_export/group_exports/no_children/group.json' } + let(:relation_reader) do + Gitlab::ImportExport::Json::LegacyReader::File.new( + path, + relation_names: reader.group_relation_names) + end + + let(:reader) do + Gitlab::ImportExport::Reader.new( + shared: shared, + config: Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.legacy_group_config_file).to_h + ) + end + + let(:relation_tree_restorer) do + described_class.new( + user: user, + shared: shared, + relation_reader: relation_reader, + object_builder: Gitlab::ImportExport::Group::ObjectBuilder, + members_mapper: members_mapper, + relation_factory: Gitlab::ImportExport::Group::RelationFactory, + reader: reader, + importable: importable, + importable_path: nil, + importable_attributes: attributes + ) + end + + subject { relation_tree_restorer.restore } + + shared_examples 'logging of relations creation' do + context 'when log_import_export_relation_creation feature flag is enabled' do + before do + stub_feature_flags(log_import_export_relation_creation: group) + end + + it 'logs top-level relation creation' do + expect(shared.logger) + .to receive(:info) + .with(hash_including(message: '[Project/Group Import] Created new object relation')) + .at_least(:once) + + subject + end + end + + context 'when log_import_export_relation_creation feature flag is disabled' do + before do + stub_feature_flags(log_import_export_relation_creation: false) + end + + it 'does not log top-level relation creation' do + expect(shared.logger) + .to receive(:info) + .with(hash_including(message: '[Project/Group Import] Created new object relation')) + .never + + subject + end + end + end 
+ + it 'restores group tree' do + expect(subject).to eq(true) + end + + include_examples 'logging of relations creation' +end diff --git a/spec/lib/gitlab/import_export/project/object_builder_spec.rb b/spec/lib/gitlab/import_export/project/object_builder_spec.rb index 4c9f9f7c690..189b798c2e8 100644 --- a/spec/lib/gitlab/import_export/project/object_builder_spec.rb +++ b/spec/lib/gitlab/import_export/project/object_builder_spec.rb @@ -123,6 +123,24 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do expect(milestone.persisted?).to be true end + + context 'with clashing iid' do + it 'creates milestone and claims iid for the new milestone' do + clashing_iid = 1 + create(:milestone, iid: clashing_iid, project: project) + + milestone = described_class.build(Milestone, + 'iid' => clashing_iid, + 'title' => 'milestone', + 'project' => project, + 'group' => nil, + 'group_id' => nil) + + expect(milestone.persisted?).to be true + expect(Milestone.count).to eq(2) + expect(milestone.iid).to eq(clashing_iid) + end + end end context 'merge_request' do @@ -176,4 +194,118 @@ RSpec.describe Gitlab::ImportExport::Project::ObjectBuilder do expect(found.email).to eq('alice@example.com') end end + + context 'merge request diff commits' do + context 'when the "committer" object is present' do + it 'uses this object as the committer' do + user = MergeRequest::DiffCommitUser + .find_or_create('Alice', 'alice@example.com') + + commit = described_class.build( + MergeRequestDiffCommit, + { + 'committer' => user, + 'committer_name' => 'Bla', + 'committer_email' => 'bla@example.com', + 'author_name' => 'Bla', + 'author_email' => 'bla@example.com' + } + ) + + expect(commit.committer).to eq(user) + end + end + + context 'when the "committer" object is missing' do + it 'creates one from the committer name and Email' do + commit = described_class.build( + MergeRequestDiffCommit, + { + 'committer_name' => 'Alice', + 'committer_email' => 'alice@example.com', + 'author_name' => 
'Alice', + 'author_email' => 'alice@example.com' + } + ) + + expect(commit.committer.name).to eq('Alice') + expect(commit.committer.email).to eq('alice@example.com') + end + end + + context 'when the "commit_author" object is present' do + it 'uses this object as the author' do + user = MergeRequest::DiffCommitUser + .find_or_create('Alice', 'alice@example.com') + + commit = described_class.build( + MergeRequestDiffCommit, + { + 'committer_name' => 'Alice', + 'committer_email' => 'alice@example.com', + 'commit_author' => user, + 'author_name' => 'Bla', + 'author_email' => 'bla@example.com' + } + ) + + expect(commit.commit_author).to eq(user) + end + end + + context 'when the "commit_author" object is missing' do + it 'creates one from the author name and Email' do + commit = described_class.build( + MergeRequestDiffCommit, + { + 'committer_name' => 'Alice', + 'committer_email' => 'alice@example.com', + 'author_name' => 'Alice', + 'author_email' => 'alice@example.com' + } + ) + + expect(commit.commit_author.name).to eq('Alice') + expect(commit.commit_author.email).to eq('alice@example.com') + end + end + end + + describe '#find_or_create_diff_commit_user' do + context 'when the user already exists' do + it 'returns the existing user' do + user = MergeRequest::DiffCommitUser + .find_or_create('Alice', 'alice@example.com') + + found = described_class + .new(MergeRequestDiffCommit, {}) + .send(:find_or_create_diff_commit_user, user.name, user.email) + + expect(found).to eq(user) + end + end + + context 'when the user does not exist' do + it 'creates the user' do + found = described_class + .new(MergeRequestDiffCommit, {}) + .send(:find_or_create_diff_commit_user, 'Alice', 'alice@example.com') + + expect(found.name).to eq('Alice') + expect(found.email).to eq('alice@example.com') + end + end + + it 'caches the results' do + builder = described_class.new(MergeRequestDiffCommit, {}) + + builder.send(:find_or_create_diff_commit_user, 'Alice', 'alice@example.com') + + record 
= ActiveRecord::QueryRecorder.new do + builder.send(:find_or_create_diff_commit_user, 'Alice', 'alice@example.com') + end + + expect(record.count).to eq(1) + end + end end diff --git a/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb new file mode 100644 index 00000000000..5ebace263ba --- /dev/null +++ b/spec/lib/gitlab/import_export/project/relation_tree_restorer_spec.rb @@ -0,0 +1,150 @@ +# frozen_string_literal: true + +# This spec is a lightweight version of: +# * project/tree_restorer_spec.rb +# +# In depth testing is being done in the above specs. +# This spec tests that restore project works +# but does not have 100% relation coverage. + +require 'spec_helper' + +RSpec.describe Gitlab::ImportExport::Project::RelationTreeRestorer do + let_it_be(:importable, reload: true) do + create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') + end + + include_context 'relation tree restorer shared context' do + let(:importable_name) { 'project' } + end + + let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) } + let(:relation_tree_restorer) do + described_class.new( + user: user, + shared: shared, + relation_reader: relation_reader, + object_builder: Gitlab::ImportExport::Project::ObjectBuilder, + members_mapper: members_mapper, + relation_factory: Gitlab::ImportExport::Project::RelationFactory, + reader: reader, + importable: importable, + importable_path: 'project', + importable_attributes: attributes + ) + end + + subject { relation_tree_restorer.restore } + + shared_examples 'import project successfully' do + describe 'imported project' do + it 'has the project attributes and relations', :aggregate_failures do + expect(subject).to eq(true) + + project = Project.find_by_path('project') + + expect(project.description).to eq('Nisi et repellendus ut enim quo accusamus vel magnam.') + expect(project.labels.count).to eq(3) + 
expect(project.boards.count).to eq(1) + expect(project.project_feature).not_to be_nil + expect(project.custom_attributes.count).to eq(2) + expect(project.project_badges.count).to eq(2) + expect(project.snippets.count).to eq(1) + end + end + end + + shared_examples 'logging of relations creation' do + context 'when log_import_export_relation_creation feature flag is enabled' do + before do + stub_feature_flags(log_import_export_relation_creation: group) + end + + it 'logs top-level relation creation' do + expect(shared.logger) + .to receive(:info) + .with(hash_including(message: '[Project/Group Import] Created new object relation')) + .at_least(:once) + + subject + end + end + + context 'when log_import_export_relation_creation feature flag is disabled' do + before do + stub_feature_flags(log_import_export_relation_creation: false) + end + + it 'does not log top-level relation creation' do + expect(shared.logger) + .to receive(:info) + .with(hash_including(message: '[Project/Group Import] Created new object relation')) + .never + + subject + end + end + end + + context 'with legacy reader' do + let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' } + let(:relation_reader) do + Gitlab::ImportExport::Json::LegacyReader::File.new( + path, + relation_names: reader.project_relation_names, + allowed_path: 'project' + ) + end + + let(:attributes) { relation_reader.consume_attributes('project') } + + it_behaves_like 'import project successfully' + + context 'with logging of relations creation' do + let_it_be(:group) { create(:group) } + let_it_be(:importable) do + create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project', group: group) + end + + include_examples 'logging of relations creation' + end + end + + context 'with ndjson reader' do + let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/tree' } + let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) } + + it_behaves_like 'import project 
successfully' + + context 'when inside a group' do + let_it_be(:group) do + create(:group, :disabled_and_unoverridable) + end + + before do + importable.update!(shared_runners_enabled: false, group: group) + end + + it_behaves_like 'import project successfully' + end + end + + context 'with invalid relations' do + let(:path) { 'spec/fixtures/lib/gitlab/import_export/project_with_invalid_relations/tree' } + let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) } + + it 'logs the invalid relation and its errors' do + expect(shared.logger) + .to receive(:warn) + .with( + error_messages: "Title can't be blank. Title is invalid", + message: '[Project/Group Import] Invalid object relation built', + relation_class: 'ProjectLabel', + relation_index: 0, + relation_key: 'labels' + ).once + + relation_tree_restorer.restore + end + end +end diff --git a/spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb index f6a028383f2..3dab84af744 100644 --- a/spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/sample/relation_tree_restorer_spec.rb @@ -10,19 +10,26 @@ require 'spec_helper' RSpec.describe Gitlab::ImportExport::Project::Sample::RelationTreeRestorer do - include_context 'relation tree restorer shared context' + let_it_be(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') } + include_context 'relation tree restorer shared context' do + let(:importable_name) { 'project' } + end + + let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) } + let(:path) { 'spec/fixtures/lib/gitlab/import_export/sample_data/tree' } + let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) } let(:sample_data_relation_tree_restorer) do described_class.new( user: user, shared: shared, relation_reader: relation_reader, - object_builder: 
object_builder, + object_builder: Gitlab::ImportExport::Project::ObjectBuilder, members_mapper: members_mapper, - relation_factory: relation_factory, + relation_factory: Gitlab::ImportExport::Project::Sample::RelationFactory, reader: reader, importable: importable, - importable_path: importable_path, + importable_path: 'project', importable_attributes: attributes ) end @@ -69,32 +76,21 @@ RSpec.describe Gitlab::ImportExport::Project::Sample::RelationTreeRestorer do end end - context 'when restoring a project' do - let(:importable) { create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') } - let(:importable_name) { 'project' } - let(:importable_path) { 'project' } - let(:object_builder) { Gitlab::ImportExport::Project::ObjectBuilder } - let(:relation_factory) { Gitlab::ImportExport::Project::Sample::RelationFactory } - let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) } - let(:path) { 'spec/fixtures/lib/gitlab/import_export/sample_data/tree' } - let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) } - - it 'initializes relation_factory with date_calculator as parameter' do - expect(Gitlab::ImportExport::Project::Sample::RelationFactory).to receive(:create).with(hash_including(:date_calculator)).at_least(:once).times + it 'initializes relation_factory with date_calculator as parameter' do + expect(Gitlab::ImportExport::Project::Sample::RelationFactory).to receive(:create).with(hash_including(:date_calculator)).at_least(:once).times - subject - end + subject + end - context 'when relation tree restorer is initialized' do - it 'initializes date calculator with due dates' do - expect(Gitlab::ImportExport::Project::Sample::DateCalculator).to receive(:new).with(Array) + context 'when relation tree restorer is initialized' do + it 'initializes date calculator with due dates' do + expect(Gitlab::ImportExport::Project::Sample::DateCalculator).to receive(:new).with(Array) - sample_data_relation_tree_restorer - 
end + sample_data_relation_tree_restorer end + end - context 'using ndjson reader' do - it_behaves_like 'import project successfully' - end + context 'using ndjson reader' do + it_behaves_like 'import project successfully' end end diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb index f512f49764d..cd3d29f1a51 100644 --- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb @@ -23,7 +23,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do ] RSpec::Mocks.with_temporary_scope do - @project = create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') + @project = create(:project, :repository, :builds_enabled, :issues_disabled, name: 'project', path: 'project') @shared = @project.import_export_shared stub_all_feature_flags @@ -36,7 +36,6 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do allow_any_instance_of(Gitlab::Git::Repository).to receive(:branch_exists?).and_return(false) expect(@shared).not_to receive(:error) - expect_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch).with('feature', 'DCBA') allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_branch) project_tree_restorer = described_class.new(user: @user, shared: @shared, project: @project) diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb index 374d688576e..f68ec21039d 100644 --- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb +++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb @@ -5,6 +5,9 @@ require 'spec_helper' RSpec.describe Gitlab::ImportExport::Project::TreeSaver do let_it_be(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" } let_it_be(:exportable_path) { 'project' } + let_it_be(:user) { create(:user) } + let_it_be(:group) { create(:group) } + 
let_it_be(:project) { setup_project } shared_examples 'saves project tree successfully' do |ndjson_enabled| include ImportExport::CommonUtil @@ -12,9 +15,6 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do subject { get_json(full_path, exportable_path, relation_name, ndjson_enabled) } describe 'saves project tree attributes' do - let_it_be(:user) { create(:user) } - let_it_be(:group) { create(:group) } - let_it_be(:project) { setup_project } let_it_be(:shared) { project.import_export_shared } let(:relation_name) { :projects } @@ -402,6 +402,50 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do it_behaves_like "saves project tree successfully", true end + context 'when streaming has to retry', :aggregate_failures do + let(:shared) { double('shared', export_path: exportable_path) } + let(:logger) { Gitlab::Import::Logger.build } + let(:serializer) { double('serializer') } + let(:error_class) { Net::OpenTimeout } + let(:info_params) do + { + 'error.class': error_class, + project_name: project.name, + project_id: project.id + } + end + + before do + allow(Gitlab::ImportExport::Json::StreamingSerializer).to receive(:new).and_return(serializer) + end + + subject(:project_tree_saver) do + described_class.new(project: project, current_user: user, shared: shared, logger: logger) + end + + it 'retries and succeeds' do + call_count = 0 + allow(serializer).to receive(:execute) do + call_count += 1 + call_count > 1 ? 
true : raise(error_class, 'execution expired') + end + + expect(logger).to receive(:info).with(hash_including(info_params)).once + + expect(project_tree_saver.save).to be(true) + end + + it 'retries and does not succeed' do + retry_count = 3 + allow(serializer).to receive(:execute).and_raise(error_class, 'execution expired') + + expect(logger).to receive(:info).with(hash_including(info_params)).exactly(retry_count).times + expect(shared).to receive(:error).with(instance_of(error_class)) + + expect(project_tree_saver.save).to be(false) + end + end + def setup_project release = create(:release) diff --git a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb deleted file mode 100644 index 5e4075c2b59..00000000000 --- a/spec/lib/gitlab/import_export/relation_tree_restorer_spec.rb +++ /dev/null @@ -1,184 +0,0 @@ -# frozen_string_literal: true - -# This spec is a lightweight version of: -# * project/tree_restorer_spec.rb -# -# In depth testing is being done in the above specs. -# This spec tests that restore project works -# but does not have 100% relation coverage. 
- -require 'spec_helper' - -RSpec.describe Gitlab::ImportExport::RelationTreeRestorer do - include_context 'relation tree restorer shared context' - - let(:relation_tree_restorer) do - described_class.new( - user: user, - shared: shared, - relation_reader: relation_reader, - object_builder: object_builder, - members_mapper: members_mapper, - relation_factory: relation_factory, - reader: reader, - importable: importable, - importable_path: importable_path, - importable_attributes: attributes - ) - end - - subject { relation_tree_restorer.restore } - - shared_examples 'import project successfully' do - describe 'imported project' do - it 'has the project attributes and relations', :aggregate_failures do - expect(subject).to eq(true) - - project = Project.find_by_path('project') - - expect(project.description).to eq('Nisi et repellendus ut enim quo accusamus vel magnam.') - expect(project.labels.count).to eq(3) - expect(project.boards.count).to eq(1) - expect(project.project_feature).not_to be_nil - expect(project.custom_attributes.count).to eq(2) - expect(project.project_badges.count).to eq(2) - expect(project.snippets.count).to eq(1) - end - end - end - - shared_examples 'logging of relations creation' do - context 'when log_import_export_relation_creation feature flag is enabled' do - before do - stub_feature_flags(log_import_export_relation_creation: group) - end - - it 'logs top-level relation creation' do - expect(relation_tree_restorer.shared.logger) - .to receive(:info) - .with(hash_including(message: '[Project/Group Import] Created new object relation')) - .at_least(:once) - - subject - end - end - - context 'when log_import_export_relation_creation feature flag is disabled' do - before do - stub_feature_flags(log_import_export_relation_creation: false) - end - - it 'does not log top-level relation creation' do - expect(relation_tree_restorer.shared.logger) - .to receive(:info) - .with(hash_including(message: '[Project/Group Import] Created new object 
relation')) - .never - - subject - end - end - end - - context 'when restoring a project' do - let_it_be(:importable, reload: true) do - create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project') - end - - let(:importable_name) { 'project' } - let(:importable_path) { 'project' } - let(:object_builder) { Gitlab::ImportExport::Project::ObjectBuilder } - let(:relation_factory) { Gitlab::ImportExport::Project::RelationFactory } - let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) } - - context 'using legacy reader' do - let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/project.json' } - let(:relation_reader) do - Gitlab::ImportExport::Json::LegacyReader::File.new( - path, - relation_names: reader.project_relation_names, - allowed_path: 'project' - ) - end - - let(:attributes) { relation_reader.consume_attributes('project') } - - it_behaves_like 'import project successfully' - - context 'logging of relations creation' do - let_it_be(:group) { create(:group) } - let_it_be(:importable) do - create(:project, :builds_enabled, :issues_disabled, name: 'project', path: 'project', group: group) - end - - include_examples 'logging of relations creation' - end - end - - context 'using ndjson reader' do - let(:path) { 'spec/fixtures/lib/gitlab/import_export/complex/tree' } - let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) } - - it_behaves_like 'import project successfully' - - context 'when inside a group' do - let_it_be(:group) do - create(:group, :disabled_and_unoverridable) - end - - before do - importable.update!(shared_runners_enabled: false, group: group) - end - - it_behaves_like 'import project successfully' - end - end - - context 'with invalid relations' do - let(:path) { 'spec/fixtures/lib/gitlab/import_export/project_with_invalid_relations/tree' } - let(:relation_reader) { Gitlab::ImportExport::Json::NdjsonReader.new(path) } - - it 'logs the invalid relation and its errors' do - 
expect(relation_tree_restorer.shared.logger) - .to receive(:warn) - .with( - error_messages: "Title can't be blank. Title is invalid", - message: '[Project/Group Import] Invalid object relation built', - relation_class: 'ProjectLabel', - relation_index: 0, - relation_key: 'labels' - ).once - - relation_tree_restorer.restore - end - end - end - - context 'when restoring a group' do - let_it_be(:group) { create(:group) } - let_it_be(:importable) { create(:group, parent: group) } - - let(:path) { 'spec/fixtures/lib/gitlab/import_export/group_exports/no_children/group.json' } - let(:importable_name) { nil } - let(:importable_path) { nil } - let(:object_builder) { Gitlab::ImportExport::Group::ObjectBuilder } - let(:relation_factory) { Gitlab::ImportExport::Group::RelationFactory } - let(:relation_reader) do - Gitlab::ImportExport::Json::LegacyReader::File.new( - path, - relation_names: reader.group_relation_names) - end - - let(:reader) do - Gitlab::ImportExport::Reader.new( - shared: shared, - config: Gitlab::ImportExport::Config.new(config: Gitlab::ImportExport.legacy_group_config_file).to_h - ) - end - - it 'restores group tree' do - expect(subject).to eq(true) - end - - include_examples 'logging of relations creation' - end -end diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml index 4b125cab49b..9daa3b32fd1 100644 --- a/spec/lib/gitlab/import_export/safe_model_attributes.yml +++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml @@ -561,6 +561,7 @@ Project: - require_password_to_approve - autoclose_referenced_issues - suggestion_commit_message +- merge_commit_template ProjectTracingSetting: - external_url Author: @@ -692,6 +693,7 @@ ProjectCiCdSetting: ProjectSetting: - allow_merge_on_skipped_pipeline - has_confluence +- has_shimo - has_vulnerabilities ProtectedEnvironment: - id diff --git a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb 
b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb index b2a11353d0c..09280402e2b 100644 --- a/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb +++ b/spec/lib/gitlab/instrumentation/redis_interceptor_spec.rb @@ -111,45 +111,4 @@ RSpec.describe Gitlab::Instrumentation::RedisInterceptor, :clean_gitlab_redis_sh end end end - - context 'when a command takes longer than DURATION_ERROR_THRESHOLD' do - let(:threshold) { 0.5 } - - before do - stub_const("#{described_class}::DURATION_ERROR_THRESHOLD", threshold) - end - - context 'when report_on_long_redis_durations is disabled' do - it 'does nothing' do - stub_feature_flags(report_on_long_redis_durations: false) - - expect(Gitlab::ErrorTracking).not_to receive(:track_exception) - - Gitlab::Redis::SharedState.with { |r| r.mget('foo', 'foo') { sleep threshold + 0.1 } } - end - end - - context 'when report_on_long_redis_durations is enabled' do - context 'for an instance other than SharedState' do - it 'does nothing' do - expect(Gitlab::ErrorTracking).not_to receive(:track_exception) - - Gitlab::Redis::Queues.with { |r| r.mget('foo', 'foo') { sleep threshold + 0.1 } } - end - end - - context 'for the SharedState instance' do - it 'tracks an exception and continues' do - expect(Gitlab::ErrorTracking) - .to receive(:track_exception) - .with(an_instance_of(described_class::MysteryRedisDurationError), - command: 'mget', - duration: be > threshold, - timestamp: a_string_matching(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{5}/)) - - Gitlab::Redis::SharedState.with { |r| r.mget('foo', 'foo') { sleep threshold + 0.1 } } - end - end - end - end end diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb index 52d3623c304..a9663012e9a 100644 --- a/spec/lib/gitlab/instrumentation_helper_spec.rb +++ b/spec/lib/gitlab/instrumentation_helper_spec.rb @@ -147,6 +147,25 @@ RSpec.describe Gitlab::InstrumentationHelper do expect(payload).not_to 
include(:caught_up_replica_pick_fail) end end + + context 'when there is an uploaded file' do + it 'adds upload data' do + uploaded_file = UploadedFile.from_params({ + 'name' => 'dir/foo.txt', + 'sha256' => 'sha256', + 'remote_url' => 'http://localhost/file', + 'remote_id' => '1234567890', + 'etag' => 'etag1234567890', + 'upload_duration' => '5.05', + 'size' => '123456' + }, nil) + + subject + + expect(payload[:uploaded_file_upload_duration_s]).to eq(uploaded_file.upload_duration) + expect(payload[:uploaded_file_size_bytes]).to eq(uploaded_file.size) + end + end end describe 'duration calculations' do diff --git a/spec/lib/gitlab/issues/rebalancing/state_spec.rb b/spec/lib/gitlab/issues/rebalancing/state_spec.rb index bdd0dbd365d..a849330ad35 100644 --- a/spec/lib/gitlab/issues/rebalancing/state_spec.rb +++ b/spec/lib/gitlab/issues/rebalancing/state_spec.rb @@ -94,7 +94,7 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st context 'when tracking new rebalance' do it 'returns as expired for non existent key' do ::Gitlab::Redis::SharedState.with do |redis| - expect(redis.ttl(rebalance_caching.send(:concurrent_running_rebalances_key))).to be < 0 + expect(redis.ttl(Gitlab::Issues::Rebalancing::State::CONCURRENT_RUNNING_REBALANCES_KEY)).to be < 0 end end @@ -102,7 +102,7 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st rebalance_caching.track_new_running_rebalance ::Gitlab::Redis::SharedState.with do |redis| - expect(redis.ttl(rebalance_caching.send(:concurrent_running_rebalances_key))).to be_between(0, described_class::REDIS_EXPIRY_TIME.ago.to_i) + expect(redis.ttl(Gitlab::Issues::Rebalancing::State::CONCURRENT_RUNNING_REBALANCES_KEY)).to be_between(0, described_class::REDIS_EXPIRY_TIME.ago.to_i) end end end @@ -169,7 +169,7 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st rebalance_caching.cleanup_cache - expect(check_existing_keys).to eq(0) + 
expect(check_existing_keys).to eq(1) end end end @@ -183,6 +183,16 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st it { expect(rebalance_caching.send(:rebalanced_container_type)).to eq(described_class::NAMESPACE) } it_behaves_like 'issues rebalance caching' + + describe '.fetch_rebalancing_groups_and_projects' do + before do + rebalance_caching.track_new_running_rebalance + end + + it 'caches recently finished rebalance key' do + expect(described_class.fetch_rebalancing_groups_and_projects).to eq([[group.id], []]) + end + end end context 'rebalancing issues in a project' do @@ -193,6 +203,16 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st it { expect(rebalance_caching.send(:rebalanced_container_type)).to eq(described_class::PROJECT) } it_behaves_like 'issues rebalance caching' + + describe '.fetch_rebalancing_groups_and_projects' do + before do + rebalance_caching.track_new_running_rebalance + end + + it 'caches recently finished rebalance key' do + expect(described_class.fetch_rebalancing_groups_and_projects).to eq([[], [project.id]]) + end + end end # count - how many issue ids to generate, issue ids will start at 1 @@ -212,11 +232,14 @@ RSpec.describe Gitlab::Issues::Rebalancing::State, :clean_gitlab_redis_shared_st def check_existing_keys index = 0 + # spec only, we do not actually scan keys in the code + recently_finished_keys_count = Gitlab::Redis::SharedState.with { |redis| redis.scan(0, match: "#{described_class::RECENTLY_FINISHED_REBALANCE_PREFIX}:*") }.last.count index += 1 if rebalance_caching.get_current_index > 0 index += 1 if rebalance_caching.get_current_project_id.present? index += 1 if rebalance_caching.get_cached_issue_ids(0, 100).present? index += 1 if rebalance_caching.rebalance_in_progress? 
+ index += 1 if recently_finished_keys_count > 0 index end diff --git a/spec/lib/gitlab/lograge/custom_options_spec.rb b/spec/lib/gitlab/lograge/custom_options_spec.rb index 9daedfc37e4..a4ae39a835a 100644 --- a/spec/lib/gitlab/lograge/custom_options_spec.rb +++ b/spec/lib/gitlab/lograge/custom_options_spec.rb @@ -19,7 +19,13 @@ RSpec.describe Gitlab::Lograge::CustomOptions do user_id: 'test', cf_ray: SecureRandom.hex, cf_request_id: SecureRandom.hex, - metadata: { 'meta.user' => 'jane.doe' } + metadata: { 'meta.user' => 'jane.doe' }, + request_urgency: :default, + target_duration_s: 1, + remote_ip: '192.168.1.2', + ua: 'Nyxt', + queue_duration_s: 0.2, + etag_route: '/etag' } end @@ -66,6 +72,18 @@ RSpec.describe Gitlab::Lograge::CustomOptions do end end + context 'trusted payload' do + it { is_expected.to include(event_payload.slice(*described_class::KNOWN_PAYLOAD_PARAMS)) } + + context 'payload with rejected fields' do + let(:event_payload) { { params: {}, request_urgency: :high, something: 'random', username: nil } } + + it { is_expected.to include({ request_urgency: :high }) } + it { is_expected.not_to include({ something: 'random' }) } + it { is_expected.not_to include({ username: nil }) } + end + end + context 'when correlation_id is overridden' do let(:correlation_id_key) { Labkit::Correlation::CorrelationId::LOG_KEY } diff --git a/spec/lib/gitlab/merge_requests/merge_commit_message_spec.rb b/spec/lib/gitlab/merge_requests/merge_commit_message_spec.rb new file mode 100644 index 00000000000..884f8df5e56 --- /dev/null +++ b/spec/lib/gitlab/merge_requests/merge_commit_message_spec.rb @@ -0,0 +1,219 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::MergeRequests::MergeCommitMessage do + let(:merge_commit_template) { nil } + let(:project) { create(:project, :public, :repository, merge_commit_template: merge_commit_template) } + let(:user) { project.creator } + let(:merge_request_description) { "Merge Request Description\nNext 
line" } + let(:merge_request_title) { 'Bugfix' } + let(:merge_request) do + create( + :merge_request, + :simple, + source_project: project, + target_project: project, + author: user, + description: merge_request_description, + title: merge_request_title + ) + end + + subject { described_class.new(merge_request: merge_request) } + + it 'returns nil when template is not set in target project' do + expect(subject.message).to be_nil + end + + context 'when project has custom merge commit template' do + let(:merge_commit_template) { <<~MSG.rstrip } + %{title} + + See merge request %{reference} + MSG + + it 'uses custom template' do + expect(subject.message).to eq <<~MSG.rstrip + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + + context 'when project has merge commit template with closed issues' do + let(:merge_commit_template) { <<~MSG.rstrip } + Merge branch '%{source_branch}' into '%{target_branch}' + + %{title} + + %{issues} + + See merge request %{reference} + MSG + + it 'omits issues and new lines when no issues are mentioned in description' do + expect(subject.message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + + context 'when MR closes issues' do + let(:issue_1) { create(:issue, project: project) } + let(:issue_2) { create(:issue, project: project) } + let(:merge_request_description) { "Description\n\nclosing #{issue_1.to_reference}, #{issue_2.to_reference}" } + + it 'includes them and keeps new line characters' do + expect(subject.message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + Closes #{issue_1.to_reference} and #{issue_2.to_reference} + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + end + + context 'when project has merge commit template with description' do + let(:merge_commit_template) { <<~MSG.rstrip } + Merge branch '%{source_branch}' into 
'%{target_branch}' + + %{title} + + %{description} + + See merge request %{reference} + MSG + + it 'uses template' do + expect(subject.message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + Merge Request Description + Next line + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + + context 'when description is empty string' do + let(:merge_request_description) { '' } + + it 'skips description placeholder and removes new line characters before it' do + expect(subject.message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + + context 'when description is nil' do + let(:merge_request_description) { nil } + + it 'skips description placeholder and removes new line characters before it' do + expect(subject.message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + + context 'when description is blank string' do + let(:merge_request_description) { "\n\r \n" } + + it 'skips description placeholder and removes new line characters before it' do + expect(subject.message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + end + + context 'when custom merge commit template contains placeholder in the middle or beginning of the line' do + let(:merge_commit_template) { <<~MSG.rstrip } + Merge branch '%{source_branch}' into '%{target_branch}' + + %{description} %{title} + + See merge request %{reference} + MSG + + it 'uses custom template' do + expect(subject.message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Merge Request Description + Next line Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + + context 'when description is empty string' do + 
let(:merge_request_description) { '' } + + it 'does not remove new line characters before empty placeholder' do + expect(subject.message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + end + + context 'when project has template with CRLF newlines' do + let(:merge_commit_template) do + "Merge branch '%{source_branch}' into '%{target_branch}'\r\n\r\n%{title}\r\n\r\n%{description}\r\n\r\nSee merge request %{reference}" + end + + it 'converts it to LF newlines' do + expect(subject.message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + Merge Request Description + Next line + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + + context 'when description is empty string' do + let(:merge_request_description) { '' } + + it 'skips description placeholder and removes new line characters before it' do + expect(subject.message).to eq <<~MSG.rstrip + Merge branch 'feature' into 'master' + + Bugfix + + See merge request #{merge_request.to_reference(full: true)} + MSG + end + end + end +end diff --git a/spec/lib/gitlab/metrics/background_transaction_spec.rb b/spec/lib/gitlab/metrics/background_transaction_spec.rb index d36ee24fc50..83bee84df99 100644 --- a/spec/lib/gitlab/metrics/background_transaction_spec.rb +++ b/spec/lib/gitlab/metrics/background_transaction_spec.rb @@ -4,27 +4,28 @@ require 'spec_helper' RSpec.describe Gitlab::Metrics::BackgroundTransaction do let(:transaction) { described_class.new } - let(:prometheus_metric) { instance_double(Prometheus::Client::Metric, base_labels: {}) } - - before do - allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric) - end describe '#run' do + let(:prometheus_metric) { instance_double(Prometheus::Client::Metric, base_labels: {}) } + + before do + allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric) + end + it 'yields 
the supplied block' do expect { |b| transaction.run(&b) }.to yield_control end it 'stores the transaction in the current thread' do transaction.run do - expect(Thread.current[described_class::BACKGROUND_THREAD_KEY]).to eq(transaction) + expect(Thread.current[described_class::THREAD_KEY]).to eq(transaction) end end it 'removes the transaction from the current thread upon completion' do transaction.run { } - expect(Thread.current[described_class::BACKGROUND_THREAD_KEY]).to be_nil + expect(Thread.current[described_class::THREAD_KEY]).to be_nil end end @@ -68,7 +69,10 @@ RSpec.describe Gitlab::Metrics::BackgroundTransaction do end end - RSpec.shared_examples 'metric with labels' do |metric_method| + it_behaves_like 'transaction metrics with labels' do + let(:transaction_obj) { described_class.new } + let(:labels) { { endpoint_id: 'TestWorker', feature_category: 'projects', queue: 'test_worker' } } + before do test_worker_class = Class.new do def self.queue @@ -78,33 +82,10 @@ RSpec.describe Gitlab::Metrics::BackgroundTransaction do stub_const('TestWorker', test_worker_class) end - it 'measures with correct labels and value' do - value = 1 - expect(prometheus_metric).to receive(metric_method).with({ - endpoint_id: 'TestWorker', feature_category: 'projects', queue: 'test_worker' - }, value) - + around do |example| Gitlab::ApplicationContext.with_raw_context(feature_category: 'projects', caller_id: 'TestWorker') do - transaction.send(metric_method, :test_metric, value) + example.run end end end - - describe '#increment' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, :increment, base_labels: {}) } - - it_behaves_like 'metric with labels', :increment - end - - describe '#set' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, :set, base_labels: {}) } - - it_behaves_like 'metric with labels', :set - end - - describe '#observe' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Histogram, :observe, 
base_labels: {}) } - - it_behaves_like 'metric with labels', :observe - end end diff --git a/spec/lib/gitlab/metrics/method_call_spec.rb b/spec/lib/gitlab/metrics/method_call_spec.rb index fb5436a90e3..6aa89c7cb05 100644 --- a/spec/lib/gitlab/metrics/method_call_spec.rb +++ b/spec/lib/gitlab/metrics/method_call_spec.rb @@ -37,7 +37,7 @@ RSpec.describe Gitlab::Metrics::MethodCall do it 'metric is not a NullMetric' do method_call.measure { 'foo' } - expect(::Gitlab::Metrics::Transaction.prometheus_metric(:gitlab_method_call_duration_seconds, :histogram)).not_to be_instance_of(Gitlab::Metrics::NullMetric) + expect(::Gitlab::Metrics::WebTransaction.prometheus_metric(:gitlab_method_call_duration_seconds, :histogram)).not_to be_instance_of(Gitlab::Metrics::NullMetric) end it 'observes the performance of the supplied block' do @@ -63,7 +63,7 @@ RSpec.describe Gitlab::Metrics::MethodCall do it 'observes using NullMetric' do method_call.measure { 'foo' } - expect(::Gitlab::Metrics::Transaction.prometheus_metric(:gitlab_method_call_duration_seconds, :histogram)).to be_instance_of(Gitlab::Metrics::NullMetric) + expect(::Gitlab::Metrics::WebTransaction.prometheus_metric(:gitlab_method_call_duration_seconds, :histogram)).to be_instance_of(Gitlab::Metrics::NullMetric) end end end diff --git a/spec/lib/gitlab/metrics/rails_slis_spec.rb b/spec/lib/gitlab/metrics/rails_slis_spec.rb index 16fcb9d46a2..a5ccf7fafa4 100644 --- a/spec/lib/gitlab/metrics/rails_slis_spec.rb +++ b/spec/lib/gitlab/metrics/rails_slis_spec.rb @@ -10,49 +10,62 @@ RSpec.describe Gitlab::Metrics::RailsSlis do allow(Gitlab::RequestEndpoints).to receive(:all_api_endpoints).and_return([api_route]) allow(Gitlab::RequestEndpoints).to receive(:all_controller_actions).and_return([[ProjectsController, 'show']]) + allow(Gitlab::Graphql::KnownOperations).to receive(:default).and_return(Gitlab::Graphql::KnownOperations.new(%w(foo bar))) end describe '.initialize_request_slis_if_needed!' 
do - it "initializes the SLI for all possible endpoints if they weren't" do + it "initializes the SLI for all possible endpoints if they weren't", :aggregate_failures do possible_labels = [ { endpoint_id: "GET /api/:version/version", - feature_category: :not_owned + feature_category: :not_owned, + request_urgency: :default }, { endpoint_id: "ProjectsController#show", - feature_category: :projects + feature_category: :projects, + request_urgency: :default } ] + possible_graphql_labels = ['graphql:foo', 'graphql:bar', 'graphql:unknown', 'graphql:anonymous'].map do |endpoint_id| + { + endpoint_id: endpoint_id, + feature_category: nil, + query_urgency: ::Gitlab::EndpointAttributes::DEFAULT_URGENCY.name + } + end + expect(Gitlab::Metrics::Sli).to receive(:initialized?).with(:rails_request_apdex) { false } + expect(Gitlab::Metrics::Sli).to receive(:initialized?).with(:graphql_query_apdex) { false } expect(Gitlab::Metrics::Sli).to receive(:initialize_sli).with(:rails_request_apdex, array_including(*possible_labels)).and_call_original + expect(Gitlab::Metrics::Sli).to receive(:initialize_sli).with(:graphql_query_apdex, array_including(*possible_graphql_labels)).and_call_original described_class.initialize_request_slis_if_needed! end - it 'does not initialize the SLI if they were initialized already' do + it 'does not initialize the SLI if they were initialized already', :aggregate_failures do expect(Gitlab::Metrics::Sli).to receive(:initialized?).with(:rails_request_apdex) { true } + expect(Gitlab::Metrics::Sli).to receive(:initialized?).with(:graphql_query_apdex) { true } expect(Gitlab::Metrics::Sli).not_to receive(:initialize_sli) described_class.initialize_request_slis_if_needed! end + end - it 'does not initialize anything if the feature flag is disabled' do - stub_feature_flags(request_apdex_counters: false) - - expect(Gitlab::Metrics::Sli).not_to receive(:initialize_sli) - expect(Gitlab::Metrics::Sli).not_to receive(:initialized?) 
- + describe '.request_apdex' do + it 'returns the initialized request apdex SLI object' do described_class.initialize_request_slis_if_needed! + + expect(described_class.request_apdex).to be_initialized end end - describe '.request_apdex' do + describe '.graphql_query_apdex' do it 'returns the initialized request apdex SLI object' do described_class.initialize_request_slis_if_needed! - expect(described_class.request_apdex).to be_initialized + expect(described_class.graphql_query_apdex).to be_initialized end end end diff --git a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb index 5870f9a8f68..3396de9b12c 100644 --- a/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb +++ b/spec/lib/gitlab/metrics/requests_rack_middleware_spec.rb @@ -36,7 +36,8 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do it 'tracks request count and duration' do expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'unknown') expect(described_class).to receive_message_chain(:http_request_duration_seconds, :observe).with({ method: 'get' }, a_positive_execution_time) - expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(labels: { feature_category: 'unknown', endpoint_id: 'unknown' }, success: true) + expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment) + .with(labels: { feature_category: 'unknown', endpoint_id: 'unknown', request_urgency: :default }, success: true) subject.call(env) end @@ -115,14 +116,14 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do context 'application context' do context 'when a context is present' do before do - ::Gitlab::ApplicationContext.push(feature_category: 'issue_tracking', caller_id: 'IssuesController#show') + ::Gitlab::ApplicationContext.push(feature_category: 'team_planning', caller_id: 
'IssuesController#show') end it 'adds the feature category to the labels for required metrics' do - expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'issue_tracking') + expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: '200', feature_category: 'team_planning') expect(described_class).not_to receive(:http_health_requests_total) expect(Gitlab::Metrics::RailsSlis.request_apdex) - .to receive(:increment).with(labels: { feature_category: 'issue_tracking', endpoint_id: 'IssuesController#show' }, success: true) + .to receive(:increment).with(labels: { feature_category: 'team_planning', endpoint_id: 'IssuesController#show', request_urgency: :default }, success: true) subject.call(env) end @@ -140,12 +141,12 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do context 'when application raises an exception when the feature category context is present' do before do - ::Gitlab::ApplicationContext.push(feature_category: 'issue_tracking') + ::Gitlab::ApplicationContext.push(feature_category: 'team_planning') allow(app).to receive(:call).and_raise(StandardError) end it 'adds the feature category to the labels for http_requests_total' do - expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: 'undefined', feature_category: 'issue_tracking') + expect(described_class).to receive_message_chain(:http_requests_total, :increment).with(method: 'get', status: 'undefined', feature_category: 'team_planning') expect(Gitlab::Metrics::RailsSlis).not_to receive(:request_apdex) expect { subject.call(env) }.to raise_error(StandardError) @@ -156,7 +157,8 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do it 'sets the required labels to unknown' do expect(described_class).to receive_message_chain(:http_requests_total, 
:increment).with(method: 'get', status: '200', feature_category: 'unknown') expect(described_class).not_to receive(:http_health_requests_total) - expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with(labels: { feature_category: 'unknown', endpoint_id: 'unknown' }, success: true) + expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment) + .with(labels: { feature_category: 'unknown', endpoint_id: 'unknown', request_urgency: :default }, success: true) subject.call(env) end @@ -206,7 +208,11 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do it "captures SLI metrics" do expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with( - labels: { feature_category: 'hello_world', endpoint_id: 'GET /projects/:id/archive' }, + labels: { + feature_category: 'hello_world', + endpoint_id: 'GET /projects/:id/archive', + request_urgency: request_urgency_name + }, success: success ) subject.call(env) @@ -235,7 +241,11 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do it "captures SLI metrics" do expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with( - labels: { feature_category: 'hello_world', endpoint_id: 'AnonymousController#index' }, + labels: { + feature_category: 'hello_world', + endpoint_id: 'AnonymousController#index', + request_urgency: request_urgency_name + }, success: success ) subject.call(env) @@ -255,17 +265,25 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do let(:api_handler) { Class.new(::API::Base) } - it "falls back request's expectation to medium (1 second)" do + it "falls back request's expectation to default (1 second)" do allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 100.9) expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with( - labels: { feature_category: 'unknown', endpoint_id: 'unknown' }, + labels: { + feature_category: 'unknown', + 
endpoint_id: 'unknown', + request_urgency: :default + }, success: true ) subject.call(env) allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 101) expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with( - labels: { feature_category: 'unknown', endpoint_id: 'unknown' }, + labels: { + feature_category: 'unknown', + endpoint_id: 'unknown', + request_urgency: :default + }, success: false ) subject.call(env) @@ -281,17 +299,25 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do { 'action_controller.instance' => controller_instance, 'REQUEST_METHOD' => 'GET' } end - it "falls back request's expectation to medium (1 second)" do + it "falls back request's expectation to default (1 second)" do allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 100.9) expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with( - labels: { feature_category: 'unknown', endpoint_id: 'unknown' }, + labels: { + feature_category: 'unknown', + endpoint_id: 'unknown', + request_urgency: :default + }, success: true ) subject.call(env) allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 101) expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with( - labels: { feature_category: 'unknown', endpoint_id: 'unknown' }, + labels: { + feature_category: 'unknown', + endpoint_id: 'unknown', + request_urgency: :default + }, success: false ) subject.call(env) @@ -303,17 +329,25 @@ RSpec.describe Gitlab::Metrics::RequestsRackMiddleware, :aggregate_failures do { 'REQUEST_METHOD' => 'GET' } end - it "falls back request's expectation to medium (1 second)" do + it "falls back request's expectation to default (1 second)" do allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 100.9) expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with( - labels: { feature_category: 'unknown', endpoint_id: 'unknown' }, + labels: { + 
feature_category: 'unknown', + endpoint_id: 'unknown', + request_urgency: :default + }, success: true ) subject.call(env) allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(100, 101) expect(Gitlab::Metrics::RailsSlis.request_apdex).to receive(:increment).with( - labels: { feature_category: 'unknown', endpoint_id: 'unknown' }, + labels: { + feature_category: 'unknown', + endpoint_id: 'unknown', + request_urgency: :default + }, success: false ) subject.call(env) diff --git a/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb index f751416f4ec..d834b796179 100644 --- a/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb +++ b/spec/lib/gitlab/metrics/samplers/action_cable_sampler_spec.rb @@ -23,64 +23,46 @@ RSpec.describe Gitlab::Metrics::Samplers::ActionCableSampler do allow(pool).to receive(:queue_length).and_return(6) end - shared_examples 'collects metrics' do |expected_labels| - it 'includes active connections' do - expect(subject.metrics[:active_connections]).to receive(:set).with(expected_labels, 0) + it 'includes active connections' do + expect(subject.metrics[:active_connections]).to receive(:set).with({}, 0) - subject.sample - end - - it 'includes minimum worker pool size' do - expect(subject.metrics[:pool_min_size]).to receive(:set).with(expected_labels, 1) - - subject.sample - end - - it 'includes maximum worker pool size' do - expect(subject.metrics[:pool_max_size]).to receive(:set).with(expected_labels, 2) - - subject.sample - end + subject.sample + end - it 'includes current worker pool size' do - expect(subject.metrics[:pool_current_size]).to receive(:set).with(expected_labels, 3) + it 'includes minimum worker pool size' do + expect(subject.metrics[:pool_min_size]).to receive(:set).with({}, 1) - subject.sample - end + subject.sample + end - it 'includes largest worker pool size' do - expect(subject.metrics[:pool_largest_size]).to 
receive(:set).with(expected_labels, 4) + it 'includes maximum worker pool size' do + expect(subject.metrics[:pool_max_size]).to receive(:set).with({}, 2) - subject.sample - end + subject.sample + end - it 'includes worker pool completed task count' do - expect(subject.metrics[:pool_completed_tasks]).to receive(:set).with(expected_labels, 5) + it 'includes current worker pool size' do + expect(subject.metrics[:pool_current_size]).to receive(:set).with({}, 3) - subject.sample - end + subject.sample + end - it 'includes worker pool pending task count' do - expect(subject.metrics[:pool_pending_tasks]).to receive(:set).with(expected_labels, 6) + it 'includes largest worker pool size' do + expect(subject.metrics[:pool_largest_size]).to receive(:set).with({}, 4) - subject.sample - end + subject.sample end - context 'for in-app mode' do - before do - expect(Gitlab::ActionCable::Config).to receive(:in_app?).and_return(true) - end + it 'includes worker pool completed task count' do + expect(subject.metrics[:pool_completed_tasks]).to receive(:set).with({}, 5) - it_behaves_like 'collects metrics', server_mode: 'in-app' + subject.sample end - context 'for standalone mode' do - before do - expect(Gitlab::ActionCable::Config).to receive(:in_app?).and_return(false) - end + it 'includes worker pool pending task count' do + expect(subject.metrics[:pool_pending_tasks]).to receive(:set).with({}, 6) - it_behaves_like 'collects metrics', server_mode: 'standalone' + subject.sample end end end diff --git a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb index 7dda10ab41d..e97a4fdddcb 100644 --- a/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb +++ b/spec/lib/gitlab/metrics/samplers/database_sampler_spec.rb @@ -18,8 +18,8 @@ RSpec.describe Gitlab::Metrics::Samplers::DatabaseSampler do let(:labels) do { class: 'ActiveRecord::Base', - host: Gitlab::Database.main.config['host'], - port: 
Gitlab::Database.main.config['port'] + host: ApplicationRecord.database.config['host'], + port: ApplicationRecord.database.config['port'] } end diff --git a/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb b/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb index adbc05cb711..e489ac97b9c 100644 --- a/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb +++ b/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb @@ -3,7 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Metrics::Subscribers::ExternalHttp, :request_store do - let(:transaction) { Gitlab::Metrics::Transaction.new } + let(:transaction) { Gitlab::Metrics::WebTransaction.new({}) } let(:subscriber) { described_class.new } around do |example| diff --git a/spec/lib/gitlab/metrics/transaction_spec.rb b/spec/lib/gitlab/metrics/transaction_spec.rb index 2ff8efcd7cb..b1c15db5193 100644 --- a/spec/lib/gitlab/metrics/transaction_spec.rb +++ b/spec/lib/gitlab/metrics/transaction_spec.rb @@ -3,172 +3,7 @@ require 'spec_helper' RSpec.describe Gitlab::Metrics::Transaction do - let(:transaction) { described_class.new } - - let(:sensitive_tags) do - { - path: 'private', - branch: 'sensitive' - } - end - - describe '#method_call_for' do - it 'returns a MethodCall' do - method = transaction.method_call_for('Foo#bar', :Foo, '#bar') - - expect(method).to be_an_instance_of(Gitlab::Metrics::MethodCall) - end - end - describe '#run' do - specify { expect { transaction.run }.to raise_error(NotImplementedError) } - end - - describe '#add_event' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, increment: nil, base_labels: {}) } - - it 'adds a metric' do - expect(prometheus_metric).to receive(:increment) - expect(described_class).to receive(:fetch_metric).with(:counter, :gitlab_transaction_event_meow_total).and_return(prometheus_metric) - - transaction.add_event(:meow) - end - - it 'allows tracking of custom tags' do - expect(prometheus_metric).to 
receive(:increment).with(hash_including(animal: "dog")) - expect(described_class).to receive(:fetch_metric).with(:counter, :gitlab_transaction_event_bau_total).and_return(prometheus_metric) - - transaction.add_event(:bau, animal: 'dog') - end - - context 'with sensitive tags' do - before do - transaction.add_event(:baubau, **sensitive_tags.merge(sane: 'yes')) - allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric) - end - - it 'filters tags' do - expect(prometheus_metric).not_to receive(:increment).with(hash_including(sensitive_tags)) - - transaction.add_event(:baubau, **sensitive_tags.merge(sane: 'yes')) - end - end - end - - describe '#increment' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, increment: nil, base_labels: {}) } - - it 'adds a metric' do - expect(prometheus_metric).to receive(:increment) - expect(::Gitlab::Metrics).to receive(:counter).with(:meow, 'Meow counter', hash_including(:controller, :action)).and_return(prometheus_metric) - - transaction.increment(:meow, 1) - end - - context 'with block' do - it 'overrides docstring' do - expect(::Gitlab::Metrics).to receive(:counter).with(:block_docstring, 'test', hash_including(:controller, :action)).and_return(prometheus_metric) - - transaction.increment(:block_docstring, 1) do - docstring 'test' - end - end - - it 'overrides labels' do - expect(::Gitlab::Metrics).to receive(:counter).with(:block_labels, 'Block labels counter', hash_including(:controller, :action, :sane)).and_return(prometheus_metric) - - labels = { sane: 'yes' } - transaction.increment(:block_labels, 1, labels) do - label_keys %i(sane) - end - end - - it 'filters sensitive tags' do - expect(::Gitlab::Metrics).to receive(:counter).with(:metric_with_sensitive_block, 'Metric with sensitive block counter', hash_excluding(sensitive_tags)).and_return(prometheus_metric) - - labels_keys = sensitive_tags.keys - transaction.increment(:metric_with_sensitive_block, 1, sensitive_tags) do - 
label_keys labels_keys - end - end - end - end - - describe '#set' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, set: nil, base_labels: {}) } - - it 'adds a metric' do - expect(prometheus_metric).to receive(:set) - expect(::Gitlab::Metrics).to receive(:gauge).with(:meow_set, 'Meow set gauge', hash_including(:controller, :action), :all).and_return(prometheus_metric) - - transaction.set(:meow_set, 1) - end - - context 'with block' do - it 'overrides docstring' do - expect(::Gitlab::Metrics).to receive(:gauge).with(:block_docstring_set, 'test', hash_including(:controller, :action), :all).and_return(prometheus_metric) - - transaction.set(:block_docstring_set, 1) do - docstring 'test' - end - end - - it 'overrides labels' do - expect(::Gitlab::Metrics).to receive(:gauge).with(:block_labels_set, 'Block labels set gauge', hash_including(:controller, :action, :sane), :all).and_return(prometheus_metric) - - labels = { sane: 'yes' } - transaction.set(:block_labels_set, 1, labels) do - label_keys %i(sane) - end - end - - it 'filters sensitive tags' do - expect(::Gitlab::Metrics).to receive(:gauge).with(:metric_set_with_sensitive_block, 'Metric set with sensitive block gauge', hash_excluding(sensitive_tags), :all).and_return(prometheus_metric) - - label_keys = sensitive_tags.keys - transaction.set(:metric_set_with_sensitive_block, 1, sensitive_tags) do - label_keys label_keys - end - end - end - end - - describe '#observe' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Histogram, observe: nil, base_labels: {}) } - - it 'adds a metric' do - expect(prometheus_metric).to receive(:observe) - expect(::Gitlab::Metrics).to receive(:histogram).with(:meow_observe, 'Meow observe histogram', hash_including(:controller, :action), kind_of(Array)).and_return(prometheus_metric) - - transaction.observe(:meow_observe, 1) - end - - context 'with block' do - it 'overrides docstring' do - expect(::Gitlab::Metrics).to 
receive(:histogram).with(:block_docstring_observe, 'test', hash_including(:controller, :action), kind_of(Array)).and_return(prometheus_metric) - - transaction.observe(:block_docstring_observe, 1) do - docstring 'test' - end - end - - it 'overrides labels' do - expect(::Gitlab::Metrics).to receive(:histogram).with(:block_labels_observe, 'Block labels observe histogram', hash_including(:controller, :action, :sane), kind_of(Array)).and_return(prometheus_metric) - - labels = { sane: 'yes' } - transaction.observe(:block_labels_observe, 1, labels) do - label_keys %i(sane) - end - end - - it 'filters sensitive tags' do - expect(::Gitlab::Metrics).to receive(:histogram).with(:metric_observe_with_sensitive_block, 'Metric observe with sensitive block histogram', hash_excluding(sensitive_tags), kind_of(Array)).and_return(prometheus_metric) - - label_keys = sensitive_tags.keys - transaction.observe(:metric_observe_with_sensitive_block, 1, sensitive_tags) do - label_keys label_keys - end - end - end + specify { expect { described_class.new.run }.to raise_error(NotImplementedError) } end end diff --git a/spec/lib/gitlab/metrics/web_transaction_spec.rb b/spec/lib/gitlab/metrics/web_transaction_spec.rb index 9e22dccb2a2..06ce58a9e84 100644 --- a/spec/lib/gitlab/metrics/web_transaction_spec.rb +++ b/spec/lib/gitlab/metrics/web_transaction_spec.rb @@ -5,41 +5,14 @@ require 'spec_helper' RSpec.describe Gitlab::Metrics::WebTransaction do let(:env) { {} } let(:transaction) { described_class.new(env) } - let(:prometheus_metric) { instance_double(Prometheus::Client::Metric, base_labels: {}) } - before do - allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric) - end - - RSpec.shared_context 'ActionController request' do - let(:request) { double(:request, format: double(:format, ref: :html)) } - let(:controller_class) { double(:controller_class, name: 'TestController') } - - before do - controller = double(:controller, class: controller_class, action_name: 
'show', request: request) - env['action_controller.instance'] = controller - end - end + describe '#run' do + let(:prometheus_metric) { instance_double(Prometheus::Client::Metric, base_labels: {}) } - RSpec.shared_context 'transaction observe metrics' do before do + allow(described_class).to receive(:prometheus_metric).and_return(prometheus_metric) allow(transaction).to receive(:observe) end - end - - RSpec.shared_examples 'metric with labels' do |metric_method| - include_context 'ActionController request' - - it 'measures with correct labels and value' do - value = 1 - expect(prometheus_metric).to receive(metric_method).with({ controller: 'TestController', action: 'show', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT }, value) - - transaction.send(metric_method, :bau, value) - end - end - - describe '#run' do - include_context 'transaction observe metrics' it 'yields the supplied block' do expect { |b| transaction.run(&b) }.to yield_control @@ -88,14 +61,6 @@ RSpec.describe Gitlab::Metrics::WebTransaction do end end - describe '#method_call_for' do - it 'returns a MethodCall' do - method = transaction.method_call_for('Foo#bar', :Foo, '#bar') - - expect(method).to be_an_instance_of(Gitlab::Metrics::MethodCall) - end - end - describe '#labels' do context 'when request goes to Grape endpoint' do before do @@ -115,7 +80,7 @@ RSpec.describe Gitlab::Metrics::WebTransaction do end it 'contains only the labels defined for transactions' do - expect(transaction.labels.keys).to contain_exactly(*described_class.superclass::BASE_LABEL_KEYS) + expect(transaction.labels.keys).to contain_exactly(*described_class::BASE_LABEL_KEYS) end it 'does not provide labels if route infos are missing' do @@ -129,14 +94,20 @@ RSpec.describe Gitlab::Metrics::WebTransaction do end context 'when request goes to ActionController' do - include_context 'ActionController request' + let(:request) { double(:request, format: double(:format, ref: :html)) } + 
let(:controller_class) { double(:controller_class, name: 'TestController') } + + before do + controller = double(:controller, class: controller_class, action_name: 'show', request: request) + env['action_controller.instance'] = controller + end it 'tags a transaction with the name and action of a controller' do expect(transaction.labels).to eq({ controller: 'TestController', action: 'show', feature_category: ::Gitlab::FeatureCategories::FEATURE_CATEGORY_DEFAULT }) end it 'contains only the labels defined for transactions' do - expect(transaction.labels.keys).to contain_exactly(*described_class.superclass::BASE_LABEL_KEYS) + expect(transaction.labels.keys).to contain_exactly(*described_class::BASE_LABEL_KEYS) end context 'when the request content type is not :html' do @@ -170,37 +141,16 @@ RSpec.describe Gitlab::Metrics::WebTransaction do end end - describe '#add_event' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, :increment, base_labels: {}) } - - it 'adds a metric' do - expect(prometheus_metric).to receive(:increment) - - transaction.add_event(:meow) - end + it_behaves_like 'transaction metrics with labels' do + let(:request) { double(:request, format: double(:format, ref: :html)) } + let(:controller_class) { double(:controller_class, name: 'TestController') } + let(:controller) { double(:controller, class: controller_class, action_name: 'show', request: request) } - it 'allows tracking of custom tags' do - expect(prometheus_metric).to receive(:increment).with(animal: "dog") + let(:transaction_obj) { described_class.new({ 'action_controller.instance' => controller }) } + let(:labels) { { controller: 'TestController', action: 'show', feature_category: 'projects' } } - transaction.add_event(:bau, animal: 'dog') + before do + ::Gitlab::ApplicationContext.push(feature_category: 'projects') end end - - describe '#increment' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Counter, :increment, base_labels: {}) } - - 
it_behaves_like 'metric with labels', :increment - end - - describe '#set' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Gauge, :set, base_labels: {}) } - - it_behaves_like 'metric with labels', :set - end - - describe '#observe' do - let(:prometheus_metric) { instance_double(Prometheus::Client::Histogram, :observe, base_labels: {}) } - - it_behaves_like 'metric with labels', :observe - end end diff --git a/spec/lib/gitlab/middleware/compressed_json_spec.rb b/spec/lib/gitlab/middleware/compressed_json_spec.rb new file mode 100644 index 00000000000..c5efc568971 --- /dev/null +++ b/spec/lib/gitlab/middleware/compressed_json_spec.rb @@ -0,0 +1,75 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Middleware::CompressedJson do + let_it_be(:decompressed_input) { '{"foo": "bar"}' } + let_it_be(:input) { ActiveSupport::Gzip.compress(decompressed_input) } + + let(:app) { double(:app) } + let(:middleware) { described_class.new(app) } + let(:env) do + { + 'HTTP_CONTENT_ENCODING' => 'gzip', + 'REQUEST_METHOD' => 'POST', + 'CONTENT_TYPE' => 'application/json', + 'PATH_INFO' => path, + 'rack.input' => StringIO.new(input) + } + end + + shared_examples 'decompress middleware' do + it 'replaces input with a decompressed content' do + expect(app).to receive(:call) + + middleware.call(env) + + expect(env['rack.input'].read).to eq(decompressed_input) + expect(env['CONTENT_LENGTH']).to eq(decompressed_input.length) + expect(env['HTTP_CONTENT_ENCODING']).to be_nil + end + end + + describe '#call' do + context 'with collector route' do + let(:path) { '/api/v4/error_tracking/collector/1/store'} + + it_behaves_like 'decompress middleware' + end + + context 'with collector route under relative url' do + let(:path) { '/gitlab/api/v4/error_tracking/collector/1/store'} + + before do + stub_config_setting(relative_url_root: '/gitlab') + end + + it_behaves_like 'decompress middleware' + end + + context 'with some other route' do + 
let(:path) { '/api/projects/123' } + + it 'keeps the original input' do + expect(app).to receive(:call) + + middleware.call(env) + + expect(env['rack.input'].read).to eq(input) + expect(env['HTTP_CONTENT_ENCODING']).to eq('gzip') + end + end + + context 'payload is too large' do + let(:body_limit) { Gitlab::Middleware::CompressedJson::MAXIMUM_BODY_SIZE } + let(:decompressed_input) { 'a' * (body_limit + 100) } + let(:input) { ActiveSupport::Gzip.compress(decompressed_input) } + let(:path) { '/api/v4/error_tracking/collector/1/envelope'} + + it 'reads only limited size' do + expect(middleware.call(env)) + .to eq([413, { 'Content-Type' => 'text/plain' }, ['Payload Too Large']]) + end + end + end +end diff --git a/spec/lib/gitlab/middleware/go_spec.rb b/spec/lib/gitlab/middleware/go_spec.rb index 0ce95fdb5af..1ef548ab29b 100644 --- a/spec/lib/gitlab/middleware/go_spec.rb +++ b/spec/lib/gitlab/middleware/go_spec.rb @@ -147,6 +147,22 @@ RSpec.describe Gitlab::Middleware::Go do end end end + + context 'when a personal access token is missing' do + before do + env['REMOTE_ADDR'] = '192.168.0.1' + env['HTTP_AUTHORIZATION'] = ActionController::HttpAuthentication::Basic.encode_credentials(current_user.username, 'dummy_password') + end + + it 'returns unauthorized' do + expect(Gitlab::Auth).to receive(:find_for_git_client).and_raise(Gitlab::Auth::MissingPersonalAccessTokenError) + response = go + + expect(response[0]).to eq(401) + expect(response[1]['Content-Length']).to be_nil + expect(response[2]).to eq(['']) + end + end end end end diff --git a/spec/lib/gitlab/middleware/query_analyzer_spec.rb b/spec/lib/gitlab/middleware/query_analyzer_spec.rb new file mode 100644 index 00000000000..5ebe6a92da6 --- /dev/null +++ b/spec/lib/gitlab/middleware/query_analyzer_spec.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Middleware::QueryAnalyzer, query_analyzers: false do + describe 'the PreventCrossDatabaseModification' do + 
describe '#call' do + let(:app) { double(:app) } + let(:middleware) { described_class.new(app) } + let(:env) { {} } + + subject { middleware.call(env) } + + context 'when there is a cross modification' do + before do + allow(app).to receive(:call) do + Project.transaction do + Project.where(id: -1).update_all(id: -1) + ::Ci::Pipeline.where(id: -1).update_all(id: -1) + end + end + end + + it 'detects cross modifications and tracks exception' do + expect(::Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + + expect { subject }.not_to raise_error + end + + context 'when the detect_cross_database_modification is disabled' do + before do + stub_feature_flags(detect_cross_database_modification: false) + end + + it 'does not detect cross modifications' do + expect(::Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + + subject + end + end + end + + context 'when there is no cross modification' do + before do + allow(app).to receive(:call) do + Project.transaction do + Project.where(id: -1).update_all(id: -1) + Namespace.where(id: -1).update_all(id: -1) + end + end + end + + it 'does not log anything' do + expect(::Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + + subject + end + end + end + end +end diff --git a/spec/lib/gitlab/path_regex_spec.rb b/spec/lib/gitlab/path_regex_spec.rb index 2f38ed58727..f0ba0f0459d 100644 --- a/spec/lib/gitlab/path_regex_spec.rb +++ b/spec/lib/gitlab/path_regex_spec.rb @@ -425,6 +425,9 @@ RSpec.describe Gitlab::PathRegex do it { is_expected.not_to match('gitlab.org/') } it { is_expected.not_to match('/gitlab.org') } it { is_expected.not_to match('gitlab git') } + it { is_expected.not_to match('gitlab?') } + it { is_expected.to match('gitlab.org-') } + it { is_expected.to match('gitlab.org_') } end describe '.project_path_format_regex' do @@ -437,6 +440,14 @@ RSpec.describe Gitlab::PathRegex do it { is_expected.not_to match('?gitlab') } it { is_expected.not_to 
match('git lab') } it { is_expected.not_to match('gitlab.git') } + it { is_expected.not_to match('gitlab?') } + it { is_expected.not_to match('gitlab git') } + it { is_expected.to match('gitlab.org') } + it { is_expected.to match('gitlab.org-') } + it { is_expected.to match('gitlab.org_') } + it { is_expected.to match('gitlab.org.') } + it { is_expected.not_to match('gitlab.org/') } + it { is_expected.not_to match('/gitlab.org') } end context 'repository routes' do diff --git a/spec/lib/gitlab/project_template_spec.rb b/spec/lib/gitlab/project_template_spec.rb index 4eb13e63b46..05417e721c7 100644 --- a/spec/lib/gitlab/project_template_spec.rb +++ b/spec/lib/gitlab/project_template_spec.rb @@ -10,8 +10,8 @@ RSpec.describe Gitlab::ProjectTemplate do gomicro gatsby hugo jekyll plainhtml gitbook hexo sse_middleman gitpod_spring_petclinic nfhugo nfjekyll nfplainhtml nfgitbook nfhexo salesforcedx - serverless_framework jsonnet cluster_management - kotlin_native_linux + serverless_framework tencent_serverless_framework + jsonnet cluster_management kotlin_native_linux ] expect(described_class.all).to be_an(Array) diff --git a/spec/lib/gitlab/prometheus_client_spec.rb b/spec/lib/gitlab/prometheus_client_spec.rb index 82ef4675553..89ddde4a01d 100644 --- a/spec/lib/gitlab/prometheus_client_spec.rb +++ b/spec/lib/gitlab/prometheus_client_spec.rb @@ -107,36 +107,14 @@ RSpec.describe Gitlab::PrometheusClient do let(:prometheus_url) {"https://prometheus.invalid.example.com/api/v1/query?query=1"} shared_examples 'exceptions are raised' do - it 'raises a Gitlab::PrometheusClient::ConnectionError error when a SocketError is rescued' do - req_stub = stub_prometheus_request_with_exception(prometheus_url, SocketError) + Gitlab::HTTP::HTTP_ERRORS.each do |error| + it "raises a Gitlab::PrometheusClient::ConnectionError when a #{error} is rescued" do + req_stub = stub_prometheus_request_with_exception(prometheus_url, error.new) - expect { subject } - .to 
raise_error(Gitlab::PrometheusClient::ConnectionError, "Can't connect to #{prometheus_url}") - expect(req_stub).to have_been_requested - end - - it 'raises a Gitlab::PrometheusClient::ConnectionError error when a SSLError is rescued' do - req_stub = stub_prometheus_request_with_exception(prometheus_url, OpenSSL::SSL::SSLError) - - expect { subject } - .to raise_error(Gitlab::PrometheusClient::ConnectionError, "#{prometheus_url} contains invalid SSL data") - expect(req_stub).to have_been_requested - end - - it 'raises a Gitlab::PrometheusClient::ConnectionError error when a Gitlab::HTTP::ResponseError is rescued' do - req_stub = stub_prometheus_request_with_exception(prometheus_url, Gitlab::HTTP::ResponseError) - - expect { subject } - .to raise_error(Gitlab::PrometheusClient::ConnectionError, "Network connection error") - expect(req_stub).to have_been_requested - end - - it 'raises a Gitlab::PrometheusClient::ConnectionError error when a Gitlab::HTTP::ResponseError with a code is rescued' do - req_stub = stub_prometheus_request_with_exception(prometheus_url, Gitlab::HTTP::ResponseError.new(code: 400)) - - expect { subject } - .to raise_error(Gitlab::PrometheusClient::ConnectionError, "Network connection error") - expect(req_stub).to have_been_requested + expect { subject } + .to raise_error(Gitlab::PrometheusClient::ConnectionError, kind_of(String)) + expect(req_stub).to have_been_requested + end end end diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb new file mode 100644 index 00000000000..bf1bf65bb9b --- /dev/null +++ b/spec/lib/gitlab/redis/multi_store_spec.rb @@ -0,0 +1,474 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Redis::MultiStore do + using RSpec::Parameterized::TableSyntax + + let_it_be(:redis_store_class) do + Class.new(Gitlab::Redis::Wrapper) do + def config_file_name + config_file_name = "spec/fixtures/config/redis_new_format_host.yml" + 
Rails.root.join(config_file_name).to_s + end + + def self.name + 'Sessions' + end + end + end + + let_it_be(:primary_db) { 1 } + let_it_be(:secondary_db) { 2 } + let_it_be(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) } + let_it_be(:secondary_store) { create_redis_store(redis_store_class.params, db: secondary_db, serializer: nil) } + let_it_be(:instance_name) { 'TestStore' } + let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)} + + subject { multi_store.send(name, *args) } + + after(:all) do + primary_store.flushdb + secondary_store.flushdb + end + + context 'when primary_store is nil' do + let(:multi_store) { described_class.new(nil, secondary_store, instance_name)} + + it 'fails with exception' do + expect { multi_store }.to raise_error(ArgumentError, /primary_store is required/) + end + end + + context 'when secondary_store is nil' do + let(:multi_store) { described_class.new(primary_store, nil, instance_name)} + + it 'fails with exception' do + expect { multi_store }.to raise_error(ArgumentError, /secondary_store is required/) + end + end + + context 'when primary_store is not a ::Redis instance' do + before do + allow(primary_store).to receive(:is_a?).with(::Redis).and_return(false) + end + + it 'fails with exception' do + expect { described_class.new(primary_store, secondary_store, instance_name) }.to raise_error(ArgumentError, /invalid primary_store/) + end + end + + context 'when secondary_store is not a ::Redis instance' do + before do + allow(secondary_store).to receive(:is_a?).with(::Redis).and_return(false) + end + + it 'fails with exception' do + expect { described_class.new(primary_store, secondary_store, instance_name) }.to raise_error(ArgumentError, /invalid secondary_store/) + end + end + + context 'with READ redis commands' do + let_it_be(:key1) { "redis:{1}:key_a" } + let_it_be(:key2) { "redis:{1}:key_b" } + let_it_be(:value1) { "redis_value1"} + 
let_it_be(:value2) { "redis_value2"} + let_it_be(:skey) { "redis:set:key" } + let_it_be(:keys) { [key1, key2] } + let_it_be(:values) { [value1, value2] } + let_it_be(:svalues) { [value2, value1] } + + where(:case_name, :name, :args, :value, :block) do + 'execute :get command' | :get | ref(:key1) | ref(:value1) | nil + 'execute :mget command' | :mget | ref(:keys) | ref(:values) | nil + 'execute :mget with block' | :mget | ref(:keys) | ref(:values) | ->(value) { value } + 'execute :smembers command' | :smembers | ref(:skey) | ref(:svalues) | nil + 'execute :scard command' | :scard | ref(:skey) | 2 | nil + end + + before(:all) do + primary_store.multi do |multi| + multi.set(key1, value1) + multi.set(key2, value2) + multi.sadd(skey, value1) + multi.sadd(skey, value2) + end + + secondary_store.multi do |multi| + multi.set(key1, value1) + multi.set(key2, value2) + multi.sadd(skey, value1) + multi.sadd(skey, value2) + end + end + + RSpec.shared_examples_for 'reads correct value' do + it 'returns the correct value' do + if value.is_a?(Array) + # :smembers does not guarantee the order it will return the values (unsorted set) + is_expected.to match_array(value) + else + is_expected.to eq(value) + end + end + end + + RSpec.shared_examples_for 'fallback read from the secondary store' do + it 'fallback and execute on secondary instance' do + expect(secondary_store).to receive(name).with(*args).and_call_original + + subject + end + + it 'logs the ReadFromPrimaryError' do + expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(Gitlab::Redis::MultiStore::ReadFromPrimaryError), + hash_including(command_name: name, extra: hash_including(instance_name: instance_name))) + + subject + end + + it 'increment read fallback count metrics' do + expect(multi_store).to receive(:increment_read_fallback_count).with(name) + + subject + end + + include_examples 'reads correct value' + + context 'when fallback read from the secondary instance raises an exception' do + before 
do + allow(secondary_store).to receive(name).with(*args).and_raise(StandardError) + end + + it 'fails with exception' do + expect { subject }.to raise_error(StandardError) + end + end + end + + RSpec.shared_examples_for 'secondary store' do + it 'execute on the secondary instance' do + expect(secondary_store).to receive(name).with(*args).and_call_original + + subject + end + + include_examples 'reads correct value' + + it 'does not execute on the primary store' do + expect(primary_store).not_to receive(name) + + subject + end + end + + with_them do + describe "#{name}" do + before do + allow(primary_store).to receive(name).and_call_original + allow(secondary_store).to receive(name).and_call_original + end + + context 'with feature flag :use_multi_store enabled' do + before do + stub_feature_flags(use_multi_store: true) + end + + context 'when reading from the primary is successful' do + it 'returns the correct value' do + expect(primary_store).to receive(name).with(*args).and_call_original + + subject + end + + it 'does not execute on the secondary store' do + expect(secondary_store).not_to receive(name) + + subject + end + + include_examples 'reads correct value' + end + + context 'when reading from primary instance is raising an exception' do + before do + allow(primary_store).to receive(name).with(*args).and_raise(StandardError) + allow(Gitlab::ErrorTracking).to receive(:log_exception) + end + + it 'logs the exception' do + expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError), + hash_including(extra: hash_including(:multi_store_error_message, instance_name: instance_name), + command_name: name)) + + subject + end + + include_examples 'fallback read from the secondary store' + end + + context 'when reading from primary instance return no value' do + before do + allow(primary_store).to receive(name).and_return(nil) + end + + include_examples 'fallback read from the secondary store' + end + + context 'when the command is 
executed within pipelined block' do + subject do + multi_store.pipelined do + multi_store.send(name, *args) + end + end + + it 'is executed only 1 time on primary instance' do + expect(primary_store).to receive(name).with(*args).once + + subject + end + end + + if params[:block] + subject do + multi_store.send(name, *args, &block) + end + + context 'when block is provided' do + it 'yields to the block' do + expect(primary_store).to receive(name).and_yield(value) + + subject + end + + include_examples 'reads correct value' + end + end + end + + context 'with feature flag :use_multi_store is disabled' do + before do + stub_feature_flags(use_multi_store: false) + end + + it_behaves_like 'secondary store' + end + + context 'with both primary and secondary store using same redis instance' do + let(:primary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) } + let(:secondary_store) { create_redis_store(redis_store_class.params, db: primary_db, serializer: nil) } + let(:multi_store) { described_class.new(primary_store, secondary_store, instance_name)} + + it_behaves_like 'secondary store' + end + end + end + end + + context 'with WRITE redis commands' do + let_it_be(:key1) { "redis:{1}:key_a" } + let_it_be(:key2) { "redis:{1}:key_b" } + let_it_be(:value1) { "redis_value1"} + let_it_be(:value2) { "redis_value2"} + let_it_be(:key1_value1) { [key1, value1] } + let_it_be(:key1_value2) { [key1, value2] } + let_it_be(:ttl) { 10 } + let_it_be(:key1_ttl_value1) { [key1, ttl, value1] } + let_it_be(:skey) { "redis:set:key" } + let_it_be(:svalues1) { [value2, value1] } + let_it_be(:svalues2) { [value1] } + let_it_be(:skey_value1) { [skey, value1] } + let_it_be(:skey_value2) { [skey, value2] } + + where(:case_name, :name, :args, :expected_value, :verification_name, :verification_args) do + 'execute :set command' | :set | ref(:key1_value1) | ref(:value1) | :get | ref(:key1) + 'execute :setnx command' | :setnx | ref(:key1_value2) | ref(:value1) | 
:get | ref(:key2) + 'execute :setex command' | :setex | ref(:key1_ttl_value1) | ref(:ttl) | :ttl | ref(:key1) + 'execute :sadd command' | :sadd | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey) + 'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey) + 'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2) + 'execute :flushdb command' | :flushdb | nil | 0 | :dbsize | nil + end + + before do + primary_store.flushdb + secondary_store.flushdb + + primary_store.multi do |multi| + multi.set(key2, value1) + multi.sadd(skey, value1) + end + + secondary_store.multi do |multi| + multi.set(key2, value1) + multi.sadd(skey, value1) + end + end + + RSpec.shared_examples_for 'verify that store contains values' do |store| + it "#{store} redis store contains correct values", :aggregate_errors do + subject + + redis_store = multi_store.send(store) + + if expected_value.is_a?(Array) + # :smembers does not guarantee the order it will return the values + expect(redis_store.send(verification_name, *verification_args)).to match_array(expected_value) + else + expect(redis_store.send(verification_name, *verification_args)).to eq(expected_value) + end + end + end + + with_them do + describe "#{name}" do + let(:expected_args) {args || no_args } + + before do + allow(primary_store).to receive(name).and_call_original + allow(secondary_store).to receive(name).and_call_original + end + + context 'with feature flag :use_multi_store enabled' do + before do + stub_feature_flags(use_multi_store: true) + end + + context 'when executing on primary instance is successful' do + it 'executes on both primary and secondary redis store', :aggregate_errors do + expect(primary_store).to receive(name).with(*expected_args).and_call_original + expect(secondary_store).to receive(name).with(*expected_args).and_call_original + + subject + end + + include_examples 'verify that store contains values', :primary_store + include_examples 'verify that store contains 
values', :secondary_store + end + + context 'when executing on the primary instance is raising an exception' do + before do + allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError) + allow(Gitlab::ErrorTracking).to receive(:log_exception) + end + + it 'logs the exception and execute on secondary instance', :aggregate_errors do + expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(StandardError), + hash_including(extra: hash_including(:multi_store_error_message), command_name: name)) + expect(secondary_store).to receive(name).with(*expected_args).and_call_original + + subject + end + + include_examples 'verify that store contains values', :secondary_store + end + + context 'when the command is executed within pipelined block' do + subject do + multi_store.pipelined do + multi_store.send(name, *args) + end + end + + it 'is executed only 1 time on each instance', :aggregate_errors do + expect(primary_store).to receive(name).with(*expected_args).once + expect(secondary_store).to receive(name).with(*expected_args).once + + subject + end + + include_examples 'verify that store contains values', :primary_store + include_examples 'verify that store contains values', :secondary_store + end + end + + context 'with feature flag :use_multi_store is disabled' do + before do + stub_feature_flags(use_multi_store: false) + end + + it 'executes only on the secondary redis store', :aggregate_errors do + expect(secondary_store).to receive(name).with(*expected_args) + expect(primary_store).not_to receive(name).with(*expected_args) + + subject + end + + include_examples 'verify that store contains values', :secondary_store + end + end + end + end + + context 'with unsupported command' do + before do + primary_store.flushdb + secondary_store.flushdb + end + + let_it_be(:key) { "redis:counter" } + + subject do + multi_store.incr(key) + end + + it 'executes method missing' do + expect(multi_store).to receive(:method_missing) + + 
subject + end + + it 'logs MethodMissingError' do + expect(Gitlab::ErrorTracking).to receive(:log_exception).with(an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError), + hash_including(command_name: :incr, extra: hash_including(instance_name: instance_name))) + + subject + end + + it 'increments method missing counter' do + expect(multi_store).to receive(:increment_method_missing_count).with(:incr) + + subject + end + + it 'fallback and executes only on the secondary store', :aggregate_errors do + expect(secondary_store).to receive(:incr).with(key).and_call_original + expect(primary_store).not_to receive(:incr) + + subject + end + + it 'correct value is stored on the secondary store', :aggregate_errors do + subject + + expect(primary_store.get(key)).to be_nil + expect(secondary_store.get(key)).to eq('1') + end + + context 'when the command is executed within pipelined block' do + subject do + multi_store.pipelined do + multi_store.incr(key) + end + end + + it 'is executed only 1 time on each instance', :aggregate_errors do + expect(primary_store).to receive(:incr).with(key).once + expect(secondary_store).to receive(:incr).with(key).once + + subject + end + + it "both redis stores are containing correct values", :aggregate_errors do + subject + + expect(primary_store.get(key)).to eq('1') + expect(secondary_store.get(key)).to eq('1') + end + end + end + + def create_redis_store(options, extras = {}) + ::Redis::Store.new(options.merge(extras)) + end +end diff --git a/spec/lib/gitlab/runtime_spec.rb b/spec/lib/gitlab/runtime_spec.rb index f51c5dd3d20..4627a8db82e 100644 --- a/spec/lib/gitlab/runtime_spec.rb +++ b/spec/lib/gitlab/runtime_spec.rb @@ -48,10 +48,9 @@ RSpec.describe Gitlab::Runtime do before do stub_const('::Puma', puma_type) - stub_env('ACTION_CABLE_IN_APP', 'false') end - it_behaves_like "valid runtime", :puma, 1 + it_behaves_like "valid runtime", :puma, 1 + Gitlab::ActionCable::Config.worker_pool_size end context "puma with cli_config" do @@ 
-61,27 +60,16 @@ RSpec.describe Gitlab::Runtime do before do stub_const('::Puma', puma_type) allow(puma_type).to receive_message_chain(:cli_config, :options).and_return(max_threads: 2, workers: max_workers) - stub_env('ACTION_CABLE_IN_APP', 'false') end - it_behaves_like "valid runtime", :puma, 3 + it_behaves_like "valid runtime", :puma, 3 + Gitlab::ActionCable::Config.worker_pool_size - context "when ActionCable in-app mode is enabled" do + context "when ActionCable worker pool size is configured" do before do - stub_env('ACTION_CABLE_IN_APP', 'true') - stub_env('ACTION_CABLE_WORKER_POOL_SIZE', '3') + stub_env('ACTION_CABLE_WORKER_POOL_SIZE', 10) end - it_behaves_like "valid runtime", :puma, 6 - end - - context "when ActionCable standalone is run" do - before do - stub_const('ACTION_CABLE_SERVER', true) - stub_env('ACTION_CABLE_WORKER_POOL_SIZE', '8') - end - - it_behaves_like "valid runtime", :puma, 11 + it_behaves_like "valid runtime", :puma, 13 end describe ".puma_in_clustered_mode?" 
do @@ -108,7 +96,7 @@ RSpec.describe Gitlab::Runtime do allow(sidekiq_type).to receive(:options).and_return(concurrency: 2) end - it_behaves_like "valid runtime", :sidekiq, 4 + it_behaves_like "valid runtime", :sidekiq, 5 end context "console" do diff --git a/spec/lib/gitlab/search_results_spec.rb b/spec/lib/gitlab/search_results_spec.rb index 27d65e14347..a38073e7c51 100644 --- a/spec/lib/gitlab/search_results_spec.rb +++ b/spec/lib/gitlab/search_results_spec.rb @@ -96,6 +96,18 @@ RSpec.describe Gitlab::SearchResults do end end + describe '#aggregations' do + where(:scope) do + %w(projects issues merge_requests blobs commits wiki_blobs epics milestones users unknown) + end + + with_them do + it 'returns an empty array' do + expect(results.aggregations(scope)).to match_array([]) + end + end + end + context "when count_limit is lower than total amount" do before do allow(results).to receive(:count_limit).and_return(1) diff --git a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb deleted file mode 100644 index e818b03cf75..00000000000 --- a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb +++ /dev/null @@ -1,334 +0,0 @@ -# frozen_string_literal: true - -require 'fast_spec_helper' -require 'rspec-parameterized' - -RSpec.describe Gitlab::SidekiqCluster::CLI do - let(:cli) { described_class.new('/dev/null') } - let(:timeout) { described_class::DEFAULT_SOFT_TIMEOUT_SECONDS } - let(:default_options) do - { env: 'test', directory: Dir.pwd, max_concurrency: 50, min_concurrency: 0, dryrun: false, timeout: timeout } - end - - before do - stub_env('RAILS_ENV', 'test') - end - - describe '#run' do - context 'without any arguments' do - it 'raises CommandError' do - expect { cli.run([]) }.to raise_error(described_class::CommandError) - end - end - - context 'with arguments' do - before do - allow(cli).to receive(:write_pid) - allow(cli).to receive(:trap_signals) - allow(cli).to receive(:start_loop) - end - - it 'starts the Sidekiq workers' do 
- expect(Gitlab::SidekiqCluster).to receive(:start) - .with([['foo']], default_options) - .and_return([]) - - cli.run(%w(foo)) - end - - it 'allows the special * selector' do - worker_queues = %w(foo bar baz) - - expect(Gitlab::SidekiqConfig::CliMethods) - .to receive(:worker_queues).and_return(worker_queues) - - expect(Gitlab::SidekiqCluster) - .to receive(:start).with([worker_queues], default_options) - - cli.run(%w(*)) - end - - it 'raises an error when the arguments contain newlines' do - invalid_arguments = [ - ["foo\n"], - ["foo\r"], - %W[foo b\nar] - ] - - invalid_arguments.each do |arguments| - expect { cli.run(arguments) }.to raise_error(described_class::CommandError) - end - end - - context 'with --negate flag' do - it 'starts Sidekiq workers for all queues in all_queues.yml except the ones in argv' do - expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['baz']) - expect(Gitlab::SidekiqCluster).to receive(:start) - .with([['baz']], default_options) - .and_return([]) - - cli.run(%w(foo -n)) - end - end - - context 'with --max-concurrency flag' do - it 'starts Sidekiq workers for specified queues with a max concurrency' do - expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz)) - expect(Gitlab::SidekiqCluster).to receive(:start) - .with([%w(foo bar baz), %w(solo)], default_options.merge(max_concurrency: 2)) - .and_return([]) - - cli.run(%w(foo,bar,baz solo -m 2)) - end - end - - context 'with --min-concurrency flag' do - it 'starts Sidekiq workers for specified queues with a min concurrency' do - expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(%w(foo bar baz)) - expect(Gitlab::SidekiqCluster).to receive(:start) - .with([%w(foo bar baz), %w(solo)], default_options.merge(min_concurrency: 2)) - .and_return([]) - - cli.run(%w(foo,bar,baz solo --min-concurrency 2)) - end - end - - context 'with --timeout flag' do - it 'when given', 'starts Sidekiq workers 
with given timeout' do - expect(Gitlab::SidekiqCluster).to receive(:start) - .with([['foo']], default_options.merge(timeout: 10)) - - cli.run(%w(foo --timeout 10)) - end - - it 'when not given', 'starts Sidekiq workers with default timeout' do - expect(Gitlab::SidekiqCluster).to receive(:start) - .with([['foo']], default_options.merge(timeout: described_class::DEFAULT_SOFT_TIMEOUT_SECONDS)) - - cli.run(%w(foo)) - end - end - - context 'with --list-queues flag' do - it 'errors when given --list-queues and --dryrun' do - expect { cli.run(%w(foo --list-queues --dryrun)) }.to raise_error(described_class::CommandError) - end - - it 'prints out a list of queues in alphabetical order' do - expected_queues = [ - 'epics:epics_update_epics_dates', - 'epics_new_epic_issue', - 'new_epic', - 'todos_destroyer:todos_destroyer_confidential_epic' - ] - - allow(Gitlab::SidekiqConfig::CliMethods).to receive(:query_queues).and_return(expected_queues.shuffle) - - expect(cli).to receive(:puts).with([expected_queues]) - - cli.run(%w(--queue-selector feature_category=epics --list-queues)) - end - end - - context 'queue namespace expansion' do - it 'starts Sidekiq workers for all queues in all_queues.yml with a namespace in argv' do - expect(Gitlab::SidekiqConfig::CliMethods).to receive(:worker_queues).and_return(['cronjob:foo', 'cronjob:bar']) - expect(Gitlab::SidekiqCluster).to receive(:start) - .with([['cronjob', 'cronjob:foo', 'cronjob:bar']], default_options) - .and_return([]) - - cli.run(%w(cronjob)) - end - end - - context "with --queue-selector" do - where do - { - 'memory-bound queues' => { - query: 'resource_boundary=memory', - included_queues: %w(project_export), - excluded_queues: %w(merge) - }, - 'memory- or CPU-bound queues' => { - query: 'resource_boundary=memory,cpu', - included_queues: %w(auto_merge:auto_merge_process project_export), - excluded_queues: %w(merge) - }, - 'high urgency CI queues' => { - query: 'feature_category=continuous_integration&urgency=high', - 
included_queues: %w(pipeline_cache:expire_job_cache pipeline_cache:expire_pipeline_cache), - excluded_queues: %w(merge) - }, - 'CPU-bound high urgency CI queues' => { - query: 'feature_category=continuous_integration&urgency=high&resource_boundary=cpu', - included_queues: %w(pipeline_cache:expire_pipeline_cache), - excluded_queues: %w(pipeline_cache:expire_job_cache merge) - }, - 'CPU-bound high urgency non-CI queues' => { - query: 'feature_category!=continuous_integration&urgency=high&resource_boundary=cpu', - included_queues: %w(new_issue), - excluded_queues: %w(pipeline_cache:expire_pipeline_cache) - }, - 'CI and SCM queues' => { - query: 'feature_category=continuous_integration|feature_category=source_code_management', - included_queues: %w(pipeline_cache:expire_job_cache merge), - excluded_queues: %w(mailers) - } - } - end - - with_them do - it 'expands queues by attributes' do - expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts| - expect(opts).to eq(default_options) - expect(queues.first).to include(*included_queues) - expect(queues.first).not_to include(*excluded_queues) - - [] - end - - cli.run(%W(--queue-selector #{query})) - end - - it 'works when negated' do - expect(Gitlab::SidekiqCluster).to receive(:start) do |queues, opts| - expect(opts).to eq(default_options) - expect(queues.first).not_to include(*included_queues) - expect(queues.first).to include(*excluded_queues) - - [] - end - - cli.run(%W(--negate --queue-selector #{query})) - end - end - - it 'expands multiple queue groups correctly' do - expect(Gitlab::SidekiqCluster) - .to receive(:start) - .with([['chat_notification'], ['project_export']], default_options) - .and_return([]) - - cli.run(%w(--queue-selector feature_category=chatops&has_external_dependencies=true resource_boundary=memory&feature_category=importers)) - end - - it 'allows the special * selector' do - worker_queues = %w(foo bar baz) - - expect(Gitlab::SidekiqConfig::CliMethods) - .to 
receive(:worker_queues).and_return(worker_queues) - - expect(Gitlab::SidekiqCluster) - .to receive(:start).with([worker_queues], default_options) - - cli.run(%w(--queue-selector *)) - end - - it 'errors when the selector matches no queues' do - expect(Gitlab::SidekiqCluster).not_to receive(:start) - - expect { cli.run(%w(--queue-selector has_external_dependencies=true&has_external_dependencies=false)) } - .to raise_error(described_class::CommandError) - end - - it 'errors on an invalid query multiple queue groups correctly' do - expect(Gitlab::SidekiqCluster).not_to receive(:start) - - expect { cli.run(%w(--queue-selector unknown_field=chatops)) } - .to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::QueryError) - end - end - end - end - - describe '#write_pid' do - context 'when a PID is specified' do - it 'writes the PID to a file' do - expect(Gitlab::SidekiqCluster).to receive(:write_pid).with('/dev/null') - - cli.option_parser.parse!(%w(-P /dev/null)) - cli.write_pid - end - end - - context 'when no PID is specified' do - it 'does not write a PID' do - expect(Gitlab::SidekiqCluster).not_to receive(:write_pid) - - cli.write_pid - end - end - end - - describe '#wait_for_termination' do - it 'waits for termination of all sub-processes and succeeds after 3 checks' do - expect(Gitlab::SidekiqCluster).to receive(:any_alive?) 
- .with(an_instance_of(Array)).and_return(true, true, true, false) - - expect(Gitlab::SidekiqCluster).to receive(:pids_alive) - .with([]).and_return([]) - - expect(Gitlab::SidekiqCluster).to receive(:signal_processes) - .with([], "-KILL") - - stub_const("Gitlab::SidekiqCluster::CLI::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1) - allow(cli).to receive(:terminate_timeout_seconds) { 1 } - - cli.wait_for_termination - end - - context 'with hanging workers' do - before do - expect(cli).to receive(:write_pid) - expect(cli).to receive(:trap_signals) - expect(cli).to receive(:start_loop) - end - - it 'hard kills workers after timeout expires' do - worker_pids = [101, 102, 103] - expect(Gitlab::SidekiqCluster).to receive(:start) - .with([['foo']], default_options) - .and_return(worker_pids) - - expect(Gitlab::SidekiqCluster).to receive(:any_alive?) - .with(worker_pids).and_return(true).at_least(10).times - - expect(Gitlab::SidekiqCluster).to receive(:pids_alive) - .with(worker_pids).and_return([102]) - - expect(Gitlab::SidekiqCluster).to receive(:signal_processes) - .with([102], "-KILL") - - cli.run(%w(foo)) - - stub_const("Gitlab::SidekiqCluster::CLI::CHECK_TERMINATE_INTERVAL_SECONDS", 0.1) - allow(cli).to receive(:terminate_timeout_seconds) { 1 } - - cli.wait_for_termination - end - end - end - - describe '#trap_signals' do - it 'traps the termination and forwarding signals' do - expect(Gitlab::SidekiqCluster).to receive(:trap_terminate) - expect(Gitlab::SidekiqCluster).to receive(:trap_forward) - - cli.trap_signals - end - end - - describe '#start_loop' do - it 'runs until one of the processes has been terminated' do - allow(cli).to receive(:sleep).with(a_kind_of(Numeric)) - - expect(Gitlab::SidekiqCluster).to receive(:all_alive?) 
- .with(an_instance_of(Array)).and_return(false) - - expect(Gitlab::SidekiqCluster).to receive(:signal_processes) - .with(an_instance_of(Array), :TERM) - - cli.start_loop - end - end -end diff --git a/spec/lib/gitlab/sidekiq_cluster_spec.rb b/spec/lib/gitlab/sidekiq_cluster_spec.rb deleted file mode 100644 index 3c6ea054968..00000000000 --- a/spec/lib/gitlab/sidekiq_cluster_spec.rb +++ /dev/null @@ -1,207 +0,0 @@ -# frozen_string_literal: true - -require 'fast_spec_helper' -require 'rspec-parameterized' - -RSpec.describe Gitlab::SidekiqCluster do - describe '.trap_signals' do - it 'traps the given signals' do - expect(described_class).to receive(:trap).ordered.with(:INT) - expect(described_class).to receive(:trap).ordered.with(:HUP) - - described_class.trap_signals(%i(INT HUP)) - end - end - - describe '.trap_terminate' do - it 'traps the termination signals' do - expect(described_class).to receive(:trap_signals) - .with(described_class::TERMINATE_SIGNALS) - - described_class.trap_terminate { } - end - end - - describe '.trap_forward' do - it 'traps the signals to forward' do - expect(described_class).to receive(:trap_signals) - .with(described_class::FORWARD_SIGNALS) - - described_class.trap_forward { } - end - end - - describe '.signal' do - it 'sends a signal to the given process' do - allow(Process).to receive(:kill).with(:INT, 4) - expect(described_class.signal(4, :INT)).to eq(true) - end - - it 'returns false when the process does not exist' do - allow(Process).to receive(:kill).with(:INT, 4).and_raise(Errno::ESRCH) - expect(described_class.signal(4, :INT)).to eq(false) - end - end - - describe '.signal_processes' do - it 'sends a signal to every given process' do - expect(described_class).to receive(:signal).with(1, :INT) - - described_class.signal_processes([1], :INT) - end - end - - describe '.start' do - it 'starts Sidekiq with the given queues, environment and options' do - expected_options = { - env: :production, - directory: 'foo/bar', - 
max_concurrency: 20, - min_concurrency: 10, - timeout: 25, - dryrun: true - } - - expect(described_class).to receive(:start_sidekiq).ordered.with(%w(foo), expected_options.merge(worker_id: 0)) - expect(described_class).to receive(:start_sidekiq).ordered.with(%w(bar baz), expected_options.merge(worker_id: 1)) - - described_class.start([%w(foo), %w(bar baz)], env: :production, directory: 'foo/bar', max_concurrency: 20, min_concurrency: 10, dryrun: true) - end - - it 'starts Sidekiq with the given queues and sensible default options' do - expected_options = { - env: :development, - directory: an_instance_of(String), - max_concurrency: 50, - min_concurrency: 0, - worker_id: an_instance_of(Integer), - timeout: 25, - dryrun: false - } - - expect(described_class).to receive(:start_sidekiq).ordered.with(%w(foo bar baz), expected_options) - expect(described_class).to receive(:start_sidekiq).ordered.with(%w(solo), expected_options) - - described_class.start([%w(foo bar baz), %w(solo)]) - end - end - - describe '.start_sidekiq' do - let(:first_worker_id) { 0 } - let(:options) do - { env: :production, directory: 'foo/bar', max_concurrency: 20, min_concurrency: 0, worker_id: first_worker_id, timeout: 10, dryrun: false } - end - - let(:env) { { "ENABLE_SIDEKIQ_CLUSTER" => "1", "SIDEKIQ_WORKER_ID" => first_worker_id.to_s } } - let(:args) { ['bundle', 'exec', 'sidekiq', anything, '-eproduction', '-t10', *([anything] * 5)] } - - it 'starts a Sidekiq process' do - allow(Process).to receive(:spawn).and_return(1) - - expect(described_class).to receive(:wait_async).with(1) - expect(described_class.start_sidekiq(%w(foo), **options)).to eq(1) - end - - it 'handles duplicate queue names' do - allow(Process) - .to receive(:spawn) - .with(env, *args, anything) - .and_return(1) - - expect(described_class).to receive(:wait_async).with(1) - expect(described_class.start_sidekiq(%w(foo foo bar baz), **options)).to eq(1) - end - - it 'runs the sidekiq process in a new process group' do - 
expect(Process) - .to receive(:spawn) - .with(anything, *args, a_hash_including(pgroup: true)) - .and_return(1) - - allow(described_class).to receive(:wait_async) - expect(described_class.start_sidekiq(%w(foo bar baz), **options)).to eq(1) - end - end - - describe '.count_by_queue' do - it 'tallies the queue counts' do - queues = [%w(foo), %w(bar baz), %w(foo)] - - expect(described_class.count_by_queue(queues)).to eq(%w(foo) => 2, %w(bar baz) => 1) - end - end - - describe '.concurrency' do - using RSpec::Parameterized::TableSyntax - - where(:queue_count, :min, :max, :expected) do - 2 | 0 | 0 | 3 # No min or max specified - 2 | 0 | 9 | 3 # No min specified, value < max - 2 | 1 | 4 | 3 # Value between min and max - 2 | 4 | 5 | 4 # Value below range - 5 | 2 | 3 | 3 # Value above range - 2 | 1 | 1 | 1 # Value above explicit setting (min == max) - 0 | 3 | 3 | 3 # Value below explicit setting (min == max) - 1 | 4 | 3 | 3 # Min greater than max - end - - with_them do - let(:queues) { Array.new(queue_count) } - - it { expect(described_class.concurrency(queues, min, max)).to eq(expected) } - end - end - - describe '.wait_async' do - it 'waits for a process in a separate thread' do - thread = described_class.wait_async(Process.spawn('true')) - - # Upon success Process.wait just returns the PID. - expect(thread.value).to be_a_kind_of(Numeric) - end - end - - # In the X_alive? checks, we check negative PIDs sometimes as a simple way - # to be sure the pids are definitely for non-existent processes. - # Note that -1 is special, and sends the signal to every process we have permission - # for, so we use -2, -3 etc - describe '.all_alive?' do - it 'returns true if all processes are alive' do - processes = [Process.pid] - - expect(described_class.all_alive?(processes)).to eq(true) - end - - it 'returns false when a thread was not alive' do - processes = [-2] - - expect(described_class.all_alive?(processes)).to eq(false) - end - end - - describe '.any_alive?' 
do - it 'returns true if at least one process is alive' do - processes = [Process.pid, -2] - - expect(described_class.any_alive?(processes)).to eq(true) - end - - it 'returns false when all threads are dead' do - processes = [-2, -3] - - expect(described_class.any_alive?(processes)).to eq(false) - end - end - - describe '.write_pid' do - it 'writes the PID of the current process to the given file' do - handle = double(:handle) - - allow(File).to receive(:open).with('/dev/null', 'w').and_yield(handle) - - expect(handle).to receive(:write).with(Process.pid.to_s) - - described_class.write_pid('/dev/null') - end - end -end diff --git a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb index bc63289a344..576b36c1829 100644 --- a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb +++ b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb @@ -11,12 +11,12 @@ RSpec.describe Gitlab::SidekiqConfig::CliMethods do end def stub_exists(exists: true) - ['app/workers/all_queues.yml', 'ee/app/workers/all_queues.yml'].each do |path| + ['app/workers/all_queues.yml', 'ee/app/workers/all_queues.yml', 'jh/app/workers/all_queues.yml'].each do |path| allow(File).to receive(:exist?).with(expand_path(path)).and_return(exists) end end - def stub_contents(foss_queues, ee_queues) + def stub_contents(foss_queues, ee_queues, jh_queues) allow(YAML).to receive(:load_file) .with(expand_path('app/workers/all_queues.yml')) .and_return(foss_queues) @@ -24,6 +24,10 @@ RSpec.describe Gitlab::SidekiqConfig::CliMethods do allow(YAML).to receive(:load_file) .with(expand_path('ee/app/workers/all_queues.yml')) .and_return(ee_queues) + + allow(YAML).to receive(:load_file) + .with(expand_path('jh/app/workers/all_queues.yml')) + .and_return(jh_queues) end before do @@ -45,8 +49,9 @@ RSpec.describe Gitlab::SidekiqConfig::CliMethods do end it 'flattens and joins the contents' do - expected_queues = %w[queue_a queue_b] - expected_queues = expected_queues.first(1) 
unless Gitlab.ee? + expected_queues = %w[queue_a] + expected_queues << 'queue_b' if Gitlab.ee? + expected_queues << 'queue_c' if Gitlab.jh? expect(described_class.worker_queues(dummy_root)) .to match_array(expected_queues) @@ -55,7 +60,7 @@ RSpec.describe Gitlab::SidekiqConfig::CliMethods do context 'when the file contains an array of hashes' do before do - stub_contents([{ name: 'queue_a' }], [{ name: 'queue_b' }]) + stub_contents([{ name: 'queue_a' }], [{ name: 'queue_b' }], [{ name: 'queue_c' }]) end include_examples 'valid file contents' diff --git a/spec/lib/gitlab/sidekiq_config/worker_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_spec.rb index f4d7a4b3359..9c252b3d50b 100644 --- a/spec/lib/gitlab/sidekiq_config/worker_spec.rb +++ b/spec/lib/gitlab/sidekiq_config/worker_spec.rb @@ -18,19 +18,26 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do get_tags: attributes[:tags] ) - described_class.new(inner_worker, ee: false) + described_class.new(inner_worker, ee: false, jh: false) end describe '#ee?' do it 'returns the EE status set on creation' do - expect(described_class.new(double, ee: true)).to be_ee - expect(described_class.new(double, ee: false)).not_to be_ee + expect(described_class.new(double, ee: true, jh: false)).to be_ee + expect(described_class.new(double, ee: false, jh: false)).not_to be_ee + end + end + + describe '#jh?' 
do + it 'returns the JH status set on creation' do + expect(described_class.new(double, ee: false, jh: true)).to be_jh + expect(described_class.new(double, ee: false, jh: false)).not_to be_jh end end describe '#==' do def worker_with_yaml(yaml) - described_class.new(double, ee: false).tap do |worker| + described_class.new(double, ee: false, jh: false).tap do |worker| allow(worker).to receive(:to_yaml).and_return(yaml) end end @@ -57,7 +64,7 @@ RSpec.describe Gitlab::SidekiqConfig::Worker do expect(worker).to receive(meth) - described_class.new(worker, ee: false).send(meth) + described_class.new(worker, ee: false, jh: false).send(meth) end end end diff --git a/spec/lib/gitlab/sidekiq_enq_spec.rb b/spec/lib/gitlab/sidekiq_enq_spec.rb new file mode 100644 index 00000000000..6903f01bf5f --- /dev/null +++ b/spec/lib/gitlab/sidekiq_enq_spec.rb @@ -0,0 +1,93 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::SidekiqEnq, :clean_gitlab_redis_queues do + let(:retry_set) { Sidekiq::Scheduled::SETS.first } + let(:schedule_set) { Sidekiq::Scheduled::SETS.last } + + around do |example| + freeze_time { example.run } + end + + shared_examples 'finds jobs that are due and enqueues them' do + before do + Sidekiq.redis do |redis| + redis.zadd(retry_set, (Time.current - 1.day).to_f.to_s, '{"jid": 1}') + redis.zadd(retry_set, Time.current.to_f.to_s, '{"jid": 2}') + redis.zadd(retry_set, (Time.current + 1.day).to_f.to_s, '{"jid": 3}') + + redis.zadd(schedule_set, (Time.current - 1.day).to_f.to_s, '{"jid": 4}') + redis.zadd(schedule_set, Time.current.to_f.to_s, '{"jid": 5}') + redis.zadd(schedule_set, (Time.current + 1.day).to_f.to_s, '{"jid": 6}') + end + end + + it 'enqueues jobs that are due' do + expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 1 }) + expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 2 }) + expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 4 }) + expect(Sidekiq::Client).to receive(:push).with({ 'jid' => 5 }) 
+ + Gitlab::SidekiqEnq.new.enqueue_jobs + + Sidekiq.redis do |redis| + expect(redis.zscan_each(retry_set).map(&:first)).to contain_exactly('{"jid": 3}') + expect(redis.zscan_each(schedule_set).map(&:first)).to contain_exactly('{"jid": 6}') + end + end + end + + context 'when atomic_sidekiq_scheduler is disabled' do + before do + stub_feature_flags(atomic_sidekiq_scheduler: false) + end + + it_behaves_like 'finds jobs that are due and enqueues them' + + context 'when ZRANGEBYSCORE returns a job that is already removed by another process' do + before do + Sidekiq.redis do |redis| + redis.zadd(schedule_set, Time.current.to_f.to_s, '{"jid": 1}') + + allow(redis).to receive(:zrangebyscore).and_wrap_original do |m, *args, **kwargs| + m.call(*args, **kwargs).tap do |jobs| + redis.zrem(schedule_set, jobs.first) if args[0] == schedule_set && jobs.first + end + end + end + end + + it 'calls ZREM but does not enqueue the job' do + Sidekiq.redis do |redis| + expect(redis).to receive(:zrem).with(schedule_set, '{"jid": 1}').twice.and_call_original + end + expect(Sidekiq::Client).not_to receive(:push) + + Gitlab::SidekiqEnq.new.enqueue_jobs + end + end + end + + context 'when atomic_sidekiq_scheduler is enabled' do + before do + stub_feature_flags(atomic_sidekiq_scheduler: true) + end + + context 'when Lua script is not yet loaded' do + before do + Gitlab::Redis::Queues.with { |redis| redis.script(:flush) } + end + + it_behaves_like 'finds jobs that are due and enqueues them' + end + + context 'when Lua script is already loaded' do + before do + Gitlab::SidekiqEnq.new.enqueue_jobs + end + + it_behaves_like 'finds jobs that are due and enqueues them' + end + end +end diff --git a/spec/lib/gitlab/sidekiq_logging/deduplication_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/deduplication_logger_spec.rb index 82f927fe481..f44a1e8b6ba 100644 --- a/spec/lib/gitlab/sidekiq_logging/deduplication_logger_spec.rb +++ b/spec/lib/gitlab/sidekiq_logging/deduplication_logger_spec.rb @@ -23,11 
+23,37 @@ RSpec.describe Gitlab::SidekiqLogging::DeduplicationLogger do } expect(Sidekiq.logger).to receive(:info).with(a_hash_including(expected_payload)).and_call_original - described_class.instance.log(job, "a fancy strategy", { foo: :bar }) + described_class.instance.deduplicated_log(job, "a fancy strategy", { foo: :bar }) end it "does not modify the job" do - expect { described_class.instance.log(job, "a fancy strategy") } + expect { described_class.instance.deduplicated_log(job, "a fancy strategy") } + .not_to change { job } + end + end + + describe '#rescheduled_log' do + let(:job) do + { + 'class' => 'TestWorker', + 'args' => [1234, 'hello', { 'key' => 'value' }], + 'jid' => 'da883554ee4fe414012f5f42', + 'correlation_id' => 'cid' + } + end + + it 'logs a rescheduled message to the sidekiq logger' do + expected_payload = { + 'job_status' => 'rescheduled', + 'message' => "#{job['class']} JID-#{job['jid']}: rescheduled" + } + expect(Sidekiq.logger).to receive(:info).with(a_hash_including(expected_payload)).and_call_original + + described_class.instance.rescheduled_log(job) + end + + it 'does not modify the job' do + expect { described_class.instance.rescheduled_log(job) } .not_to change { job } end end diff --git a/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb index c879fdea3ad..b6fb3fecf20 100644 --- a/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb +++ b/spec/lib/gitlab/sidekiq_logging/json_formatter_spec.rb @@ -17,6 +17,7 @@ RSpec.describe Gitlab::SidekiqLogging::JSONFormatter do 'class' => 'PostReceive', 'bar' => 'test', 'created_at' => timestamp, + 'scheduled_at' => timestamp, 'enqueued_at' => timestamp, 'started_at' => timestamp, 'retried_at' => timestamp, @@ -31,6 +32,7 @@ RSpec.describe Gitlab::SidekiqLogging::JSONFormatter do 'severity' => 'INFO', 'time' => timestamp_iso8601, 'created_at' => timestamp_iso8601, + 'scheduled_at' => timestamp_iso8601, 'enqueued_at' => 
timestamp_iso8601, 'started_at' => timestamp_iso8601, 'retried_at' => timestamp_iso8601, diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb index 5083ac514db..833de6ae624 100644 --- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb @@ -24,6 +24,10 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi "#{Gitlab::Redis::Queues::SIDEKIQ_NAMESPACE}:duplicate:#{queue}:#{hash}" end + let(:deduplicated_flag_key) do + "#{idempotency_key}:deduplicate_flag" + end + describe '#schedule' do shared_examples 'scheduling with deduplication class' do |strategy_class| it 'calls schedule on the strategy' do @@ -81,25 +85,43 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi context 'when there was no job in the queue yet' do it { expect(duplicate_job.check!).to eq('123') } - it "adds a idempotency key with ttl set to #{described_class::DUPLICATE_KEY_TTL}" do - expect { duplicate_job.check! } - .to change { read_idempotency_key_with_ttl(idempotency_key) } - .from([nil, -2]) - .to(['123', be_within(1).of(described_class::DUPLICATE_KEY_TTL)]) - end - - context 'when wal locations is not empty' do - it "adds a existing wal locations key with ttl set to #{described_class::DUPLICATE_KEY_TTL}" do + shared_examples 'sets Redis keys with correct TTL' do + it "adds an idempotency key with correct ttl" do expect { duplicate_job.check! 
} - .to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) } - .from([nil, -2]) - .to([wal_locations[:main], be_within(1).of(described_class::DUPLICATE_KEY_TTL)]) - .and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) } + .to change { read_idempotency_key_with_ttl(idempotency_key) } .from([nil, -2]) - .to([wal_locations[:ci], be_within(1).of(described_class::DUPLICATE_KEY_TTL)]) + .to(['123', be_within(1).of(expected_ttl)]) + end + + context 'when wal locations is not empty' do + it "adds an existing wal locations key with correct ttl" do + expect { duplicate_job.check! } + .to change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :main)) } + .from([nil, -2]) + .to([wal_locations[:main], be_within(1).of(expected_ttl)]) + .and change { read_idempotency_key_with_ttl(existing_wal_location_key(idempotency_key, :ci)) } + .from([nil, -2]) + .to([wal_locations[:ci], be_within(1).of(expected_ttl)]) + end end end + context 'with TTL option is not set' do + let(:expected_ttl) { described_class::DEFAULT_DUPLICATE_KEY_TTL } + + it_behaves_like 'sets Redis keys with correct TTL' + end + + context 'when TTL option is set' do + let(:expected_ttl) { 5.minutes } + + before do + allow(duplicate_job).to receive(:options).and_return({ ttl: expected_ttl }) + end + + it_behaves_like 'sets Redis keys with correct TTL' + end + context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do before do stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false) @@ -152,26 +174,21 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi end describe '#update_latest_wal_location!' 
do - let(:offset) { '1024' } - before do - allow(duplicate_job).to receive(:pg_wal_lsn_diff).with(:main).and_return(offset) - allow(duplicate_job).to receive(:pg_wal_lsn_diff).with(:ci).and_return(offset) - end + allow(Gitlab::Database).to receive(:database_base_models).and_return( + { main: ::ActiveRecord::Base, + ci: ::ActiveRecord::Base }) - shared_examples 'updates wal location' do - it 'updates a wal location to redis with an offset' do - expect { duplicate_job.update_latest_wal_location! } - .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) } - .from(existing_wal_with_offset[:main]) - .to(new_wal_with_offset[:main]) - .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) } - .from(existing_wal_with_offset[:ci]) - .to(new_wal_with_offset[:ci]) - end + set_idempotency_key(existing_wal_location_key(idempotency_key, :main), existing_wal[:main]) + set_idempotency_key(existing_wal_location_key(idempotency_key, :ci), existing_wal[:ci]) + + # read existing_wal_locations + duplicate_job.check! 
end context 'when preserve_latest_wal_locations_for_idempotent_jobs feature flag is disabled' do + let(:existing_wal) { {} } + before do stub_feature_flags(preserve_latest_wal_locations_for_idempotent_jobs: false) end @@ -192,42 +209,107 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi end context "when the key doesn't exists in redis" do - include_examples 'updates wal location' do - let(:existing_wal_with_offset) { { main: [], ci: [] } } - let(:new_wal_with_offset) { wal_locations.transform_values { |v| [v, offset] } } + let(:existing_wal) do + { + main: '0/D525E3A0', + ci: 'AB/12340' + } end - end - context "when the key exists in redis" do - let(:existing_offset) { '1023'} - let(:existing_wal_locations) do + let(:new_wal_location_with_offset) do { - main: '0/D525E3NM', - ci: 'AB/111112' + # offset is relative to `existing_wal` + main: ['0/D525E3A8', '8'], + ci: ['AB/12345', '5'] } end + let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) } + + it 'stores a wal location to redis with an offset relative to existing wal location' do + expect { duplicate_job.update_latest_wal_location! 
} + .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) } + .from([]) + .to(new_wal_location_with_offset[:main]) + .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) } + .from([]) + .to(new_wal_location_with_offset[:ci]) + end + end + + context "when the key exists in redis" do before do - rpush_to_redis_key(wal_location_key(idempotency_key, :main), existing_wal_locations[:main], existing_offset) - rpush_to_redis_key(wal_location_key(idempotency_key, :ci), existing_wal_locations[:ci], existing_offset) + rpush_to_redis_key(wal_location_key(idempotency_key, :main), *stored_wal_location_with_offset[:main]) + rpush_to_redis_key(wal_location_key(idempotency_key, :ci), *stored_wal_location_with_offset[:ci]) end + let(:wal_locations) { new_wal_location_with_offset.transform_values(&:first) } + context "when the new offset is bigger then the existing one" do - include_examples 'updates wal location' do - let(:existing_wal_with_offset) { existing_wal_locations.transform_values { |v| [v, existing_offset] } } - let(:new_wal_with_offset) { wal_locations.transform_values { |v| [v, offset] } } + let(:existing_wal) do + { + main: '0/D525E3A0', + ci: 'AB/12340' + } + end + + let(:stored_wal_location_with_offset) do + { + # offset is relative to `existing_wal` + main: ['0/D525E3A3', '3'], + ci: ['AB/12342', '2'] + } + end + + let(:new_wal_location_with_offset) do + { + # offset is relative to `existing_wal` + main: ['0/D525E3A8', '8'], + ci: ['AB/12345', '5'] + } + end + + it 'updates a wal location to redis with an offset' do + expect { duplicate_job.update_latest_wal_location! 
} + .to change { read_range_from_redis(wal_location_key(idempotency_key, :main)) } + .from(stored_wal_location_with_offset[:main]) + .to(new_wal_location_with_offset[:main]) + .and change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) } + .from(stored_wal_location_with_offset[:ci]) + .to(new_wal_location_with_offset[:ci]) end end context "when the old offset is not bigger then the existing one" do - let(:existing_offset) { offset } + let(:existing_wal) do + { + main: '0/D525E3A0', + ci: 'AB/12340' + } + end + + let(:stored_wal_location_with_offset) do + { + # offset is relative to `existing_wal` + main: ['0/D525E3A8', '8'], + ci: ['AB/12345', '5'] + } + end + + let(:new_wal_location_with_offset) do + { + # offset is relative to `existing_wal` + main: ['0/D525E3A2', '2'], + ci: ['AB/12342', '2'] + } + end it "does not update a wal location to redis with an offset" do expect { duplicate_job.update_latest_wal_location! } .to not_change { read_range_from_redis(wal_location_key(idempotency_key, :main)) } - .from([existing_wal_locations[:main], existing_offset]) + .from(stored_wal_location_with_offset[:main]) .and not_change { read_range_from_redis(wal_location_key(idempotency_key, :ci)) } - .from([existing_wal_locations[:ci], existing_offset]) + .from(stored_wal_location_with_offset[:ci]) end end end @@ -270,6 +352,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi context 'when the key exists in redis' do before do set_idempotency_key(idempotency_key, 'existing-jid') + set_idempotency_key(deduplicated_flag_key, 1) wal_locations.each do |config_name, location| set_idempotency_key(existing_wal_location_key(idempotency_key, config_name), location) set_idempotency_key(wal_location_key(idempotency_key, config_name), location) @@ -299,6 +382,11 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi let(:from_value) { 'existing-jid' } end + it_behaves_like 'deleting keys from redis', 
'deduplication counter key' do + let(:key) { deduplicated_flag_key } + let(:from_value) { '1' } + end + it_behaves_like 'deleting keys from redis', 'existing wal location keys for main database' do let(:key) { existing_wal_location_key(idempotency_key, :main) } let(:from_value) { wal_locations[:main] } @@ -390,6 +478,103 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi end end + describe '#reschedule' do + it 'reschedules the current job' do + fake_logger = instance_double(Gitlab::SidekiqLogging::DeduplicationLogger) + expect(Gitlab::SidekiqLogging::DeduplicationLogger).to receive(:instance).and_return(fake_logger) + expect(fake_logger).to receive(:rescheduled_log).with(a_hash_including({ 'jid' => '123' })) + expect(AuthorizedProjectsWorker).to receive(:perform_async).with(1).once + + duplicate_job.reschedule + end + end + + describe '#should_reschedule?' do + subject { duplicate_job.should_reschedule? } + + context 'when the job is reschedulable' do + before do + allow(duplicate_job).to receive(:reschedulable?) { true } + end + + it { is_expected.to eq(false) } + + context 'with deduplicated flag' do + before do + duplicate_job.set_deduplicated_flag! + end + + it { is_expected.to eq(true) } + end + end + + context 'when the job is not reschedulable' do + before do + allow(duplicate_job).to receive(:reschedulable?) { false } + end + + it { is_expected.to eq(false) } + + context 'with deduplicated flag' do + before do + duplicate_job.set_deduplicated_flag! + end + + it { is_expected.to eq(false) } + end + end + end + + describe '#set_deduplicated_flag!' do + context 'when the job is reschedulable' do + before do + allow(duplicate_job).to receive(:reschedulable?) { true } + end + + it 'sets the key in Redis' do + duplicate_job.set_deduplicated_flag! 
+ + flag = Sidekiq.redis { |redis| redis.get(deduplicated_flag_key) } + + expect(flag).to eq(described_class::DEDUPLICATED_FLAG_VALUE.to_s) + end + + it 'sets, gets and cleans up the deduplicated flag' do + expect(duplicate_job.should_reschedule?).to eq(false) + + duplicate_job.set_deduplicated_flag! + expect(duplicate_job.should_reschedule?).to eq(true) + + duplicate_job.delete! + expect(duplicate_job.should_reschedule?).to eq(false) + end + end + + context 'when the job is not reschedulable' do + before do + allow(duplicate_job).to receive(:reschedulable?) { false } + end + + it 'does not set the key in Redis' do + duplicate_job.set_deduplicated_flag! + + flag = Sidekiq.redis { |redis| redis.get(deduplicated_flag_key) } + + expect(flag).to be_nil + end + + it 'does not set the deduplicated flag' do + expect(duplicate_job.should_reschedule?).to eq(false) + + duplicate_job.set_deduplicated_flag! + expect(duplicate_job.should_reschedule?).to eq(false) + + duplicate_job.delete! + expect(duplicate_job.should_reschedule?).to eq(false) + end + end + end + describe '#duplicate?' do it "raises an error if the check wasn't performed" do expect { duplicate_job.duplicate? 
}.to raise_error /Call `#check!` first/ @@ -494,12 +679,12 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gi end end - def existing_wal_location_key(idempotency_key, config_name) - "#{idempotency_key}:#{config_name}:existing_wal_location" + def existing_wal_location_key(idempotency_key, connection_name) + "#{idempotency_key}:#{connection_name}:existing_wal_location" end - def wal_location_key(idempotency_key, config_name) - "#{idempotency_key}:#{config_name}:wal_location" + def wal_location_key(idempotency_key, connection_name) + "#{idempotency_key}:#{connection_name}:wal_location" end def set_idempotency_key(key, value = '1') diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb index 9772255fc50..963301bc001 100644 --- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executed_spec.rb @@ -9,6 +9,9 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecut before do allow(fake_duplicate_job).to receive(:latest_wal_locations).and_return( {} ) + allow(fake_duplicate_job).to receive(:scheduled?) { false } + allow(fake_duplicate_job).to receive(:options) { {} } + allow(fake_duplicate_job).to receive(:should_reschedule?) { false } end it 'deletes the lock after executing' do @@ -19,6 +22,28 @@ RSpec.describe Gitlab::SidekiqMiddleware::DuplicateJobs::Strategies::UntilExecut proc.call end end + + it 'does not reschedule the job even if deduplication happened' do + expect(fake_duplicate_job).to receive(:delete!) + expect(fake_duplicate_job).not_to receive(:reschedule) + + strategy.perform({}) do + proc.call + end + end + + context 'when job is reschedulable' do + it 'reschedules the job if deduplication happened' do + allow(fake_duplicate_job).to receive(:should_reschedule?) 
{ true } + + expect(fake_duplicate_job).to receive(:delete!) + expect(fake_duplicate_job).to receive(:reschedule).once + + strategy.perform({}) do + proc.call + end + end + end end end end diff --git a/spec/lib/gitlab/sidekiq_middleware/query_analyzer_spec.rb b/spec/lib/gitlab/sidekiq_middleware/query_analyzer_spec.rb new file mode 100644 index 00000000000..e58af1d60fe --- /dev/null +++ b/spec/lib/gitlab/sidekiq_middleware/query_analyzer_spec.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::SidekiqMiddleware::QueryAnalyzer, query_analyzers: false do + describe 'the PreventCrossDatabaseModification' do + describe '#call' do + let(:worker) { double(:worker) } + let(:job) { { 'jid' => 'job123' } } + let(:queue) { 'some-queue' } + let(:middleware) { described_class.new } + + def do_queries + end + + subject { middleware.call(worker, job, queue) { do_queries } } + + context 'when there is a cross modification' do + def do_queries + Project.transaction do + Project.where(id: -1).update_all(id: -1) + ::Ci::Pipeline.where(id: -1).update_all(id: -1) + end + end + + it 'detects cross modifications and tracks exception' do + expect(::Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception) + + subject + end + + context 'when the detect_cross_database_modification is disabled' do + before do + stub_feature_flags(detect_cross_database_modification: false) + end + + it 'does not detect cross modifications' do + expect(::Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + + subject + end + end + end + + context 'when there is no cross modification' do + def do_queries + Project.transaction do + Project.where(id: -1).update_all(id: -1) + Namespace.where(id: -1).update_all(id: -1) + end + end + + it 'does not log anything' do + expect(::Gitlab::ErrorTracking).not_to receive(:track_and_raise_for_dev_exception) + + subject + end + end + end + end +end diff --git 
a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb index 3a6fdd7642c..876069a1a92 100644 --- a/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/size_limiter/validator_spec.rb @@ -59,111 +59,6 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator, :aggregate_fai expect(validator.size_limit).to eq(2) end end - - context 'when the input mode is valid' do - it 'does not log a warning message' do - expect(::Sidekiq.logger).not_to receive(:warn) - - described_class.new(TestSizeLimiterWorker, job_payload, mode: 'track') - described_class.new(TestSizeLimiterWorker, job_payload, mode: 'compress') - end - end - - context 'when the input mode is invalid' do - it 'defaults to track mode and logs a warning message' do - expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter mode: invalid. Fallback to track mode.') - - validator = described_class.new(TestSizeLimiterWorker, job_payload, mode: 'invalid') - - expect(validator.mode).to eql('track') - end - end - - context 'when the input mode is empty' do - it 'defaults to track mode' do - expect(::Sidekiq.logger).not_to receive(:warn) - - validator = described_class.new(TestSizeLimiterWorker, job_payload, mode: nil) - - expect(validator.mode).to eql('track') - end - end - - context 'when the size input is valid' do - it 'does not log a warning message' do - expect(::Sidekiq.logger).not_to receive(:warn) - - described_class.new(TestSizeLimiterWorker, job_payload, size_limit: 300) - described_class.new(TestSizeLimiterWorker, job_payload, size_limit: 0) - end - end - - context 'when the size input is invalid' do - it 'logs a warning message' do - expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter limit: -1') - - validator = described_class.new(TestSizeLimiterWorker, job_payload, size_limit: -1) - - expect(validator.size_limit).to 
be(0) - end - end - - context 'when the size input is empty' do - it 'defaults to 0' do - expect(::Sidekiq.logger).not_to receive(:warn) - - validator = described_class.new(TestSizeLimiterWorker, job_payload, size_limit: nil) - - expect(validator.size_limit).to be(described_class::DEFAULT_SIZE_LIMIT) - end - end - - context 'when the compression threshold is valid' do - it 'does not log a warning message' do - expect(::Sidekiq.logger).not_to receive(:warn) - - described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: 300) - described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: 1) - end - end - - context 'when the compression threshold is negative' do - it 'logs a warning message' do - expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter compression threshold: -1') - - described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: -1) - end - - it 'falls back to the default' do - validator = described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: -1) - - expect(validator.compression_threshold).to be(100_000) - end - end - - context 'when the compression threshold is zero' do - it 'logs a warning message' do - expect(::Sidekiq.logger).to receive(:warn).with('Invalid Sidekiq size limiter compression threshold: 0') - - described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: 0) - end - - it 'falls back to the default' do - validator = described_class.new(TestSizeLimiterWorker, job_payload, compression_threshold: 0) - - expect(validator.compression_threshold).to be(100_000) - end - end - - context 'when the compression threshold is empty' do - it 'defaults to 100_000' do - expect(::Sidekiq.logger).not_to receive(:warn) - - validator = described_class.new(TestSizeLimiterWorker, job_payload) - - expect(validator.compression_threshold).to be(100_000) - end - end end shared_examples 'validate limit job payload size' do @@ -171,20 +66,6 @@ 
RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator, :aggregate_fai let(:compression_threshold) { nil } let(:mode) { 'track' } - context 'when size limit negative' do - let(:size_limit) { -1 } - - it 'does not track jobs' do - expect(Gitlab::ErrorTracking).not_to receive(:track_exception) - - validate.call(TestSizeLimiterWorker, job_payload(a: 'a' * 300)) - end - - it 'does not raise exception' do - expect { validate.call(TestSizeLimiterWorker, job_payload(a: 'a' * 300)) }.not_to raise_error - end - end - context 'when size limit is 0' do let(:size_limit) { 0 } let(:job) { job_payload(a: 'a' * 300) } @@ -438,36 +319,20 @@ RSpec.describe Gitlab::SidekiqMiddleware::SizeLimiter::Validator, :aggregate_fai end describe '#validate!' do - context 'when creating an instance with the related configuration variables' do - let(:validate) do - ->(worker_clas, job) do - described_class.new(worker_class, job).validate! - end + let(:validate) do + ->(worker_class, job) do + described_class.new(worker_class, job).validate! end - - before do - stub_application_setting( - sidekiq_job_limiter_mode: mode, - sidekiq_job_limiter_compression_threshold_bytes: compression_threshold, - sidekiq_job_limiter_limit_bytes: size_limit - ) - end - - it_behaves_like 'validate limit job payload size' end - context 'when creating an instance with mode and size limit' do - let(:validate) do - ->(worker_clas, job) do - validator = described_class.new( - worker_class, job, - mode: mode, size_limit: size_limit, compression_threshold: compression_threshold - ) - validator.validate! 
- end - end - - it_behaves_like 'validate limit job payload size' + before do + stub_application_setting( + sidekiq_job_limiter_mode: mode, + sidekiq_job_limiter_compression_threshold_bytes: compression_threshold, + sidekiq_job_limiter_limit_bytes: size_limit + ) end + + it_behaves_like 'validate limit job payload size' end end diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb index 92a11c83a4a..b9a13fd697e 100644 --- a/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb +++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/client_spec.rb @@ -11,7 +11,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do include ApplicationWorker - feature_category :issue_tracking + feature_category :team_planning def self.job_for_args(args) jobs.find { |job| job['args'] == args } @@ -78,8 +78,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do job1 = TestWithContextWorker.job_for_args(['job1', 1, 2, 3]) job2 = TestWithContextWorker.job_for_args(['job2', 1, 2, 3]) - expect(job1['meta.feature_category']).to eq('issue_tracking') - expect(job2['meta.feature_category']).to eq('issue_tracking') + expect(job1['meta.feature_category']).to eq('team_planning') + expect(job2['meta.feature_category']).to eq('team_planning') end it 'takes the feature category from the caller if the worker is not owned' do @@ -116,8 +116,8 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Client do job1 = TestWithContextWorker.job_for_args(['job1', 1, 2, 3]) job2 = TestWithContextWorker.job_for_args(['job2', 1, 2, 3]) - expect(job1['meta.feature_category']).to eq('issue_tracking') - expect(job2['meta.feature_category']).to eq('issue_tracking') + expect(job1['meta.feature_category']).to eq('team_planning') + expect(job2['meta.feature_category']).to eq('team_planning') end it 'takes the feature category from the caller if the worker is not owned' do diff 
--git a/spec/lib/gitlab/spamcheck/client_spec.rb b/spec/lib/gitlab/spamcheck/client_spec.rb index 15e963fe423..e542ce455bb 100644 --- a/spec/lib/gitlab/spamcheck/client_spec.rb +++ b/spec/lib/gitlab/spamcheck/client_spec.rb @@ -82,7 +82,7 @@ RSpec.describe Gitlab::Spamcheck::Client do end end - describe '#build_user_proto_buf', :aggregate_failures do + describe '#build_user_protobuf', :aggregate_failures do it 'builds the expected protobuf object' do user_pb = described_class.new.send(:build_user_protobuf, user) expect(user_pb.username).to eq user.username diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb index a3808b0f0e2..4be1c85f7c8 100644 --- a/spec/lib/gitlab/subscription_portal_spec.rb +++ b/spec/lib/gitlab/subscription_portal_spec.rb @@ -9,14 +9,13 @@ RSpec.describe ::Gitlab::SubscriptionPortal do before do stub_env('CUSTOMER_PORTAL_URL', env_value) - stub_feature_flags(new_customersdot_staging_url: false) end describe '.default_subscriptions_url' do where(:test, :development, :result) do false | false | 'https://customers.gitlab.com' - false | true | 'https://customers.stg.gitlab.com' - true | false | 'https://customers.stg.gitlab.com' + false | true | 'https://customers.staging.gitlab.com' + true | false | 'https://customers.staging.gitlab.com' end before do @@ -35,7 +34,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do subject { described_class.subscriptions_url } context 'when CUSTOMER_PORTAL_URL ENV is unset' do - it { is_expected.to eq('https://customers.stg.gitlab.com') } + it { is_expected.to eq('https://customers.staging.gitlab.com') } end context 'when CUSTOMER_PORTAL_URL ENV is set' do @@ -55,15 +54,15 @@ RSpec.describe ::Gitlab::SubscriptionPortal do context 'url methods' do where(:method_name, :result) do - :default_subscriptions_url | 'https://customers.stg.gitlab.com' - :payment_form_url | 'https://customers.stg.gitlab.com/payment_forms/cc_validation' - :subscriptions_graphql_url | 
'https://customers.stg.gitlab.com/graphql' - :subscriptions_more_minutes_url | 'https://customers.stg.gitlab.com/buy_pipeline_minutes' - :subscriptions_more_storage_url | 'https://customers.stg.gitlab.com/buy_storage' - :subscriptions_manage_url | 'https://customers.stg.gitlab.com/subscriptions' - :subscriptions_plans_url | 'https://customers.stg.gitlab.com/plans' - :subscriptions_instance_review_url | 'https://customers.stg.gitlab.com/instance_review' - :subscriptions_gitlab_plans_url | 'https://customers.stg.gitlab.com/gitlab_plans' + :default_subscriptions_url | 'https://customers.staging.gitlab.com' + :payment_form_url | 'https://customers.staging.gitlab.com/payment_forms/cc_validation' + :subscriptions_graphql_url | 'https://customers.staging.gitlab.com/graphql' + :subscriptions_more_minutes_url | 'https://customers.staging.gitlab.com/buy_pipeline_minutes' + :subscriptions_more_storage_url | 'https://customers.staging.gitlab.com/buy_storage' + :subscriptions_manage_url | 'https://customers.staging.gitlab.com/subscriptions' + :subscriptions_plans_url | 'https://about.gitlab.com/pricing/' + :subscriptions_instance_review_url | 'https://customers.staging.gitlab.com/instance_review' + :subscriptions_gitlab_plans_url | 'https://customers.staging.gitlab.com/gitlab_plans' end with_them do @@ -78,7 +77,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do let(:group_id) { 153 } - it { is_expected.to eq("https://customers.stg.gitlab.com/gitlab/namespaces/#{group_id}/extra_seats") } + it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/extra_seats") } end describe '.upgrade_subscription_url' do @@ -87,7 +86,7 @@ RSpec.describe ::Gitlab::SubscriptionPortal do let(:group_id) { 153 } let(:plan_id) { 5 } - it { is_expected.to eq("https://customers.stg.gitlab.com/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}") } + it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/upgrade/#{plan_id}") } end 
describe '.renew_subscription_url' do @@ -95,6 +94,6 @@ RSpec.describe ::Gitlab::SubscriptionPortal do let(:group_id) { 153 } - it { is_expected.to eq("https://customers.stg.gitlab.com/gitlab/namespaces/#{group_id}/renew") } + it { is_expected.to eq("https://customers.staging.gitlab.com/gitlab/namespaces/#{group_id}/renew") } end end diff --git a/spec/lib/gitlab/tracking/destinations/product_analytics_spec.rb b/spec/lib/gitlab/tracking/destinations/product_analytics_spec.rb deleted file mode 100644 index 63e2e930acd..00000000000 --- a/spec/lib/gitlab/tracking/destinations/product_analytics_spec.rb +++ /dev/null @@ -1,84 +0,0 @@ -# frozen_string_literal: true - -require 'spec_helper' - -RSpec.describe Gitlab::Tracking::Destinations::ProductAnalytics do - let(:emitter) { SnowplowTracker::Emitter.new('localhost', buffer_size: 1) } - let(:tracker) { SnowplowTracker::Tracker.new(emitter, SnowplowTracker::Subject.new, 'namespace', 'app_id') } - - describe '#event' do - shared_examples 'does not send an event' do - it 'does not send an event' do - expect_any_instance_of(SnowplowTracker::Tracker).not_to receive(:track_struct_event) - - subject.event(allowed_category, allowed_action) - end - end - - let(:allowed_category) { 'epics' } - let(:allowed_action) { 'promote' } - let(:self_monitoring_project) { create(:project) } - - before do - stub_feature_flags(product_analytics_tracking: true) - stub_application_setting(self_monitoring_project_id: self_monitoring_project.id) - stub_application_setting(usage_ping_enabled: true) - end - - context 'with allowed event' do - it 'sends an event to Product Analytics snowplow collector' do - expect(SnowplowTracker::AsyncEmitter) - .to receive(:new) - .with(ProductAnalytics::Tracker::COLLECTOR_URL, protocol: Gitlab.config.gitlab.protocol) - .and_return(emitter) - - expect(SnowplowTracker::Tracker) - .to receive(:new) - .with(emitter, an_instance_of(SnowplowTracker::Subject), Gitlab::Tracking::SNOWPLOW_NAMESPACE, 
self_monitoring_project.id.to_s) - .and_return(tracker) - - freeze_time do - expect(tracker) - .to receive(:track_struct_event) - .with(allowed_category, allowed_action, 'label', 'property', 1.5, nil, (Time.now.to_f * 1000).to_i) - - subject.event(allowed_category, allowed_action, label: 'label', property: 'property', value: 1.5) - end - end - end - - context 'with non-allowed event' do - it 'does not send an event' do - expect_any_instance_of(SnowplowTracker::Tracker).not_to receive(:track_struct_event) - - subject.event('category', 'action') - subject.event(allowed_category, 'action') - subject.event('category', allowed_action) - end - end - - context 'when self-monitoring project does not exist' do - before do - stub_application_setting(self_monitoring_project_id: nil) - end - - include_examples 'does not send an event' - end - - context 'when product_analytics_tracking FF is disabled' do - before do - stub_feature_flags(product_analytics_tracking: false) - end - - include_examples 'does not send an event' - end - - context 'when usage ping is disabled' do - before do - stub_application_setting(usage_ping_enabled: false) - end - - include_examples 'does not send an event' - end - end -end diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb new file mode 100644 index 00000000000..6004698d092 --- /dev/null +++ b/spec/lib/gitlab/tracking/destinations/snowplow_micro_spec.rb @@ -0,0 +1,51 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Tracking::Destinations::SnowplowMicro do + include StubENV + + before do + stub_application_setting(snowplow_enabled: true) + stub_env('SNOWPLOW_MICRO_ENABLE', '1') + allow(Rails.env).to receive(:development?).and_return(true) + end + + describe '#hostname' do + context 'when SNOWPLOW_MICRO_URI is set' do + before do + stub_env('SNOWPLOW_MICRO_URI', 'http://gdk.test:9091') + end + + it 'returns hostname URI part' do 
+ expect(subject.hostname).to eq('gdk.test:9091') + end + end + + context 'when SNOWPLOW_MICRO_URI is without protocol' do + before do + stub_env('SNOWPLOW_MICRO_URI', 'gdk.test:9091') + end + + it 'returns hostname URI part' do + expect(subject.hostname).to eq('gdk.test:9091') + end + end + + context 'when SNOWPLOW_MICRO_URI is hostname only' do + before do + stub_env('SNOWPLOW_MICRO_URI', 'uriwithoutport') + end + + it 'returns hostname URI with default HTTP port' do + expect(subject.hostname).to eq('uriwithoutport:80') + end + end + + context 'when SNOWPLOW_MICRO_URI is not set' do + it 'returns localhost hostname' do + expect(subject.hostname).to eq('localhost:9090') + end + end + end +end diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb index 8ded80dd191..7d678db5ec8 100644 --- a/spec/lib/gitlab/tracking/standard_context_spec.rb +++ b/spec/lib/gitlab/tracking/standard_context_spec.rb @@ -99,25 +99,5 @@ RSpec.describe Gitlab::Tracking::StandardContext do it 'accepts just project id as integer' do expect { described_class.new(project: 1).to_context }.not_to raise_error end - - context 'without add_namespace_and_project_to_snowplow_tracking feature' do - before do - stub_feature_flags(add_namespace_and_project_to_snowplow_tracking: false) - end - - it 'does not contain project or namespace ids' do - expect(snowplow_context.to_json[:data].keys).not_to include(:project_id, :namespace_id) - end - end - - context 'without add_actor_based_user_to_snowplow_tracking feature' do - before do - stub_feature_flags(add_actor_based_user_to_snowplow_tracking: false) - end - - it 'does not contain user_id' do - expect(snowplow_context.to_json[:data].keys).not_to include(:user_id) - end - end end end diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb index dacaae55676..61b2c89ffa1 100644 --- a/spec/lib/gitlab/tracking_spec.rb +++ b/spec/lib/gitlab/tracking_spec.rb @@ -2,6 +2,8 @@ 
require 'spec_helper' RSpec.describe Gitlab::Tracking do + include StubENV + before do stub_application_setting(snowplow_enabled: true) stub_application_setting(snowplow_collector_hostname: 'gitfoo.com') @@ -12,17 +14,62 @@ RSpec.describe Gitlab::Tracking do end describe '.options' do - it 'returns useful client options' do - expected_fields = { - namespace: 'gl', - hostname: 'gitfoo.com', - cookieDomain: '.gitfoo.com', - appId: '_abc123_', - formTracking: true, - linkClickTracking: true - } - - expect(subject.options(nil)).to match(expected_fields) + shared_examples 'delegates to destination' do |klass| + before do + allow_next_instance_of(klass) do |instance| + allow(instance).to receive(:options).and_call_original + end + end + + it "delegates to #{klass} destination" do + expect_next_instance_of(klass) do |instance| + expect(instance).to receive(:options) + end + + subject.options(nil) + end + end + + context 'when destination is Snowplow' do + it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow + + it 'returns useful client options' do + expected_fields = { + namespace: 'gl', + hostname: 'gitfoo.com', + cookieDomain: '.gitfoo.com', + appId: '_abc123_', + formTracking: true, + linkClickTracking: true + } + + expect(subject.options(nil)).to match(expected_fields) + end + end + + context 'when destination is SnowplowMicro' do + before do + stub_env('SNOWPLOW_MICRO_ENABLE', '1') + allow(Rails.env).to receive(:development?).and_return(true) + end + + it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::SnowplowMicro + + it 'returns useful client options' do + expected_fields = { + namespace: 'gl', + hostname: 'localhost:9090', + cookieDomain: '.gitlab.com', + appId: '_abc123_', + protocol: 'http', + port: 9090, + force_secure_tracker: false, + formTracking: true, + linkClickTracking: true + } + + expect(subject.options(nil)).to match(expected_fields) + end end it 'when feature flag is disabled' do @@ -41,7 
+88,6 @@ RSpec.describe Gitlab::Tracking do shared_examples 'delegates to destination' do |klass| before do allow_any_instance_of(Gitlab::Tracking::Destinations::Snowplow).to receive(:event) - allow_any_instance_of(Gitlab::Tracking::Destinations::ProductAnalytics).to receive(:event) end it "delegates to #{klass} destination" do @@ -72,8 +118,23 @@ RSpec.describe Gitlab::Tracking do end end - it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow - it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::ProductAnalytics + context 'when destination is Snowplow' do + before do + stub_env('SNOWPLOW_MICRO_ENABLE', '0') + allow(Rails.env).to receive(:development?).and_return(true) + end + + it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow + end + + context 'when destination is SnowplowMicro' do + before do + stub_env('SNOWPLOW_MICRO_ENABLE', '1') + allow(Rails.env).to receive(:development?).and_return(true) + end + + it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::SnowplowMicro + end it 'tracks errors' do expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with( diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb index 522f69062fb..a22b3a733bd 100644 --- a/spec/lib/gitlab/usage/metric_definition_spec.rb +++ b/spec/lib/gitlab/usage/metric_definition_spec.rb @@ -9,6 +9,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do value_type: 'string', product_category: 'collection', product_stage: 'growth', + product_section: 'devops', status: 'active', milestone: '14.1', default_generation: 'generation_1', @@ -222,6 +223,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do value_type: 'string', product_category: 'collection', product_stage: 'growth', + product_section: 'devops', status: 'active', milestone: '14.1', default_generation: 'generation_1', diff --git 
a/spec/lib/gitlab/usage/metric_spec.rb b/spec/lib/gitlab/usage/metric_spec.rb index ea8d1a135a6..19d2d3048eb 100644 --- a/spec/lib/gitlab/usage/metric_spec.rb +++ b/spec/lib/gitlab/usage/metric_spec.rb @@ -45,4 +45,10 @@ RSpec.describe Gitlab::Usage::Metric do expect(described_class.new(issue_count_metric_definiton).with_instrumentation).to eq({ counts: { issues: "SELECT COUNT(\"issues\".\"id\") FROM \"issues\"" } }) end end + + describe '#with_suggested_name' do + it 'returns key_path metric with the corresponding generated query' do + expect(described_class.new(issue_count_metric_definiton).with_suggested_name).to eq({ counts: { issues: 'count_issues' } }) + end + end end diff --git a/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb b/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb index 158be34d39c..c8cb1bb4373 100644 --- a/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb +++ b/spec/lib/gitlab/usage/metrics/instrumentations/generic_metric_spec.rb @@ -7,18 +7,18 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GenericMetric do subject do Class.new(described_class) do fallback(custom_fallback) - value { Gitlab::Database.main.version } + value { ApplicationRecord.database.version } end.new(time_frame: 'none') end describe '#value' do it 'gives the correct value' do - expect(subject.value).to eq(Gitlab::Database.main.version) + expect(subject.value).to eq(ApplicationRecord.database.version) end context 'when raising an exception' do it 'return the custom fallback' do - expect(Gitlab::Database.main).to receive(:version).and_raise('Error') + expect(ApplicationRecord.database).to receive(:version).and_raise('Error') expect(subject.value).to eq(custom_fallback) end end @@ -28,18 +28,18 @@ RSpec.describe Gitlab::Usage::Metrics::Instrumentations::GenericMetric do context 'with default fallback' do subject do Class.new(described_class) do - value { Gitlab::Database.main.version } + value { 
ApplicationRecord.database.version } end.new(time_frame: 'none') end describe '#value' do it 'gives the correct value' do - expect(subject.value).to eq(Gitlab::Database.main.version ) + expect(subject.value).to eq(ApplicationRecord.database.version ) end context 'when raising an exception' do it 'return the default fallback' do - expect(Gitlab::Database.main).to receive(:version).and_raise('Error') + expect(ApplicationRecord.database).to receive(:version).and_raise('Error') expect(subject.value).to eq(described_class::FALLBACK) end end diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb index 0f95da74ff9..dbbc718e147 100644 --- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb +++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb @@ -25,10 +25,30 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do end context 'for count with default column metrics' do - it_behaves_like 'name suggestion' do - # corresponding metric is collected with count(Board) - let(:key_path) { 'counts.boards' } - let(:name_suggestion) { /count_boards/ } + context 'with usage_data_instrumentation feature flag' do + context 'when enabled' do + before do + stub_feature_flags(usage_data_instrumentation: true) + end + + it_behaves_like 'name suggestion' do + # corresponding metric is collected with ::Gitlab::UsageDataMetrics.suggested_names + let(:key_path) { 'counts.boards' } + let(:name_suggestion) { /count_boards/ } + end + end + + context 'when disabled' do + before do + stub_feature_flags(usage_data_instrumentation: false) + end + + it_behaves_like 'name suggestion' do + # corresponding metric is collected with count(Board) + let(:key_path) { 'counts.boards' } + let(:name_suggestion) { /count_boards/ } + end + end end end diff --git a/spec/lib/gitlab/usage_data_counters/vs_code_extenion_activity_unique_counter_spec.rb 
b/spec/lib/gitlab/usage_data_counters/vscode_extenion_activity_unique_counter_spec.rb index 7593d51fe76..7593d51fe76 100644 --- a/spec/lib/gitlab/usage_data_counters/vs_code_extenion_activity_unique_counter_spec.rb +++ b/spec/lib/gitlab/usage_data_counters/vscode_extenion_activity_unique_counter_spec.rb diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb index ee0cfb1407e..563eed75c38 100644 --- a/spec/lib/gitlab/usage_data_metrics_spec.rb +++ b/spec/lib/gitlab/usage_data_metrics_spec.rb @@ -13,7 +13,9 @@ RSpec.describe Gitlab::UsageDataMetrics do end before do - allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false) + allow_next_instance_of(Gitlab::Database::BatchCounter) do |batch_counter| + allow(batch_counter).to receive(:transaction_open?).and_return(false) + end end context 'with instrumentation_class' do @@ -76,4 +78,16 @@ RSpec.describe Gitlab::UsageDataMetrics do end end end + + describe '.suggested_names' do + subject { described_class.suggested_names } + + let(:suggested_names) do + ::Gitlab::Usage::Metric.all.map(&:with_suggested_name).reduce({}, :deep_merge) + end + + it 'includes Service Ping suggested names' do + expect(subject).to match_array(suggested_names) + end + end end diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb index 833bf260019..cf544c07195 100644 --- a/spec/lib/gitlab/usage_data_spec.rb +++ b/spec/lib/gitlab/usage_data_spec.rb @@ -80,6 +80,12 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do end end end + + it 'allows indifferent access' do + allow(::Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:unique_events).and_return(1) + expect(subject[:search_unique_visits][:search_unique_visits_for_any_target_monthly]).to eq(1) + expect(subject[:search_unique_visits]['search_unique_visits_for_any_target_monthly']).to eq(1) + end end describe 'usage_activity_by_stage_package' do @@ -187,6 +193,8 @@ RSpec.describe 
Gitlab::UsageData, :aggregate_failures do end describe 'usage_activity_by_stage_manage' do + let_it_be(:error_rate) { Gitlab::Database::PostgresHll::BatchDistinctCounter::ERROR_RATE } + it 'includes accurate usage_activity_by_stage data' do stub_config( omniauth: @@ -207,14 +215,14 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do end expect(described_class.usage_activity_by_stage_manage({})).to include( - events: 2, + events: -1, groups: 2, users_created: 6, omniauth_providers: ['google_oauth2'], user_auth_by_provider: { 'group_saml' => 2, 'ldap' => 4, 'standard' => 0, 'two-factor' => 0, 'two-factor-via-u2f-device' => 0, "two-factor-via-webauthn-device" => 0 } ) expect(described_class.usage_activity_by_stage_manage(described_class.monthly_time_range_db_params)).to include( - events: 1, + events: be_within(error_rate).percent_of(1), groups: 1, users_created: 3, omniauth_providers: ['google_oauth2'], @@ -367,9 +375,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do def omniauth_providers [ - OpenStruct.new(name: 'google_oauth2'), - OpenStruct.new(name: 'ldapmain'), - OpenStruct.new(name: 'group_saml') + double('provider', name: 'google_oauth2'), + double('provider', name: 'ldapmain'), + double('provider', name: 'group_saml') ] end end @@ -428,7 +436,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do end expect(described_class.usage_activity_by_stage_plan({})).to include( - issues: 3, notes: 2, projects: 2, todos: 2, @@ -439,7 +446,6 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do projects_jira_dvcs_server_active: 2 ) expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include( - issues: 2, notes: 1, projects: 1, todos: 1, @@ -450,6 +456,44 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do projects_jira_dvcs_server_active: 1 ) end + + context 'with usage_data_instrumentation feature flag' do + context 'when enabled' do + it 'merges the data from instrumentation 
classes' do + stub_feature_flags(usage_data_instrumentation: true) + + for_defined_days_back do + user = create(:user) + project = create(:project, creator: user) + create(:issue, project: project, author: user) + create(:issue, project: project, author: User.support_bot) + end + + expect(described_class.usage_activity_by_stage_plan({})).to include(issues: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) + expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include(issues: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) + + uncached_data = described_class.uncached_data + expect(uncached_data[:usage_activity_by_stage][:plan]).to include(issues: 3) + expect(uncached_data[:usage_activity_by_stage_monthly][:plan]).to include(issues: 2) + end + end + + context 'when disabled' do + it 'does not merge the data from instrumentation classes' do + stub_feature_flags(usage_data_instrumentation: false) + + for_defined_days_back do + user = create(:user) + project = create(:project, creator: user) + create(:issue, project: project, author: user) + create(:issue, project: project, author: User.support_bot) + end + + expect(described_class.usage_activity_by_stage_plan({})).to include(issues: 3) + expect(described_class.usage_activity_by_stage_plan(described_class.monthly_time_range_db_params)).to include(issues: 2) + end + end + end end describe 'usage_activity_by_stage_release' do @@ -466,17 +510,53 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do deployments: 2, failed_deployments: 2, releases: 2, - successful_deployments: 2, - releases_with_milestones: 2 + successful_deployments: 2 ) expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to include( deployments: 1, failed_deployments: 1, releases: 1, - successful_deployments: 1, - releases_with_milestones: 1 + successful_deployments: 1 ) end + + context 'with usage_data_instrumentation feature flag' do 
+ before do + for_defined_days_back do + user = create(:user) + create(:deployment, :failed, user: user) + release = create(:release, author: user) + create(:milestone, project: release.project, releases: [release]) + create(:deployment, :success, user: user) + end + end + + context 'when enabled' do + before do + stub_feature_flags(usage_data_instrumentation: true) + end + + it 'merges data from instrumentation classes' do + expect(described_class.usage_activity_by_stage_release({})).to include(releases_with_milestones: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) + expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to include(releases_with_milestones: Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) + + uncached_data = described_class.uncached_data + expect(uncached_data[:usage_activity_by_stage][:release]).to include(releases_with_milestones: 2) + expect(uncached_data[:usage_activity_by_stage_monthly][:release]).to include(releases_with_milestones: 1) + end + end + + context 'when disabled' do + before do + stub_feature_flags(usage_data_instrumentation: false) + end + + it 'does not merge data from instrumentation classes' do + expect(described_class.usage_activity_by_stage_release({})).to include(releases_with_milestones: 2) + expect(described_class.usage_activity_by_stage_release(described_class.monthly_time_range_db_params)).to include(releases_with_milestones: 1) + end + end + end end describe 'usage_activity_by_stage_verify' do @@ -525,16 +605,16 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do subject { described_class.data } it 'gathers usage data' do - expect(subject.keys).to include(*UsageDataHelpers::USAGE_DATA_KEYS) + expect(subject.keys).to include(*UsageDataHelpers::USAGE_DATA_KEYS.map(&:to_s)) end it 'gathers usage counts', :aggregate_failures do count_data = subject[:counts] - expect(count_data[:boards]).to eq(1) expect(count_data[:projects]).to eq(4) - 
expect(count_data.keys).to include(*UsageDataHelpers::COUNTS_KEYS) - expect(UsageDataHelpers::COUNTS_KEYS - count_data.keys).to be_empty + count_keys = UsageDataHelpers::COUNTS_KEYS.map(&:to_s) + expect(count_data.keys).to include(*count_keys) + expect(count_keys - count_data.keys).to be_empty expect(count_data.values).to all(be_a_kind_of(Integer)) end @@ -619,7 +699,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do external_diffs: { enabled: false }, lfs: { enabled: true, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } }, uploads: { enabled: nil, object_store: { enabled: false, direct_upload: true, background_upload: false, provider: "AWS" } }, - packages: { enabled: true, object_store: { enabled: false, direct_upload: false, background_upload: true, provider: "AWS" } } } + packages: { enabled: true, object_store: { enabled: false, direct_upload: false, background_upload: true, provider: "AWS" } } }.with_indifferent_access ) end @@ -793,12 +873,37 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do subject { described_class.license_usage_data } it 'gathers license data' do - expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid) expect(subject[:version]).to eq(Gitlab::VERSION) expect(subject[:installation_type]).to eq('gitlab-development-kit') - expect(subject[:active_user_count]).to eq(User.active.size) expect(subject[:recorded_at]).to be_a(Time) end + + context 'with usage_data_instrumentation feature flag' do + context 'when enabled' do + it 'merges uuid and hostname data from instrumentation classes' do + stub_feature_flags(usage_data_instrumentation: true) + + expect(subject[:uuid]).to eq(Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) + expect(subject[:hostname]).to eq(Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) + expect(subject[:active_user_count]).to eq(Gitlab::Utils::UsageData::INSTRUMENTATION_CLASS_FALLBACK) + + uncached_data = described_class.data + 
expect(uncached_data[:uuid]).to eq(Gitlab::CurrentSettings.uuid) + expect(uncached_data[:hostname]).to eq(Gitlab.config.gitlab.host) + expect(uncached_data[:active_user_count]).to eq(User.active.size) + end + end + + context 'when disabled' do + it 'does not merge uuid and hostname data from instrumentation classes' do + stub_feature_flags(usage_data_instrumentation: false) + + expect(subject[:uuid]).to eq(Gitlab::CurrentSettings.uuid) + expect(subject[:hostname]).to eq(Gitlab.config.gitlab.host) + expect(subject[:active_user_count]).to eq(User.active.size) + end + end + end end context 'when not relying on database records' do @@ -873,9 +978,9 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do expect(subject[:gitlab_pages][:enabled]).to eq(Gitlab.config.pages.enabled) expect(subject[:gitlab_pages][:version]).to eq(Gitlab::Pages::VERSION) expect(subject[:git][:version]).to eq(Gitlab::Git.version) - expect(subject[:database][:adapter]).to eq(Gitlab::Database.main.adapter_name) - expect(subject[:database][:version]).to eq(Gitlab::Database.main.version) - expect(subject[:database][:pg_system_id]).to eq(Gitlab::Database.main.system_id) + expect(subject[:database][:adapter]).to eq(ApplicationRecord.database.adapter_name) + expect(subject[:database][:version]).to eq(ApplicationRecord.database.version) + expect(subject[:database][:pg_system_id]).to eq(ApplicationRecord.database.system_id) expect(subject[:mail][:smtp_server]).to eq(ActionMailer::Base.smtp_settings[:address]) expect(subject[:gitaly][:version]).to be_present expect(subject[:gitaly][:servers]).to be >= 1 @@ -1061,18 +1166,46 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do expect(subject[:settings][:gitaly_apdex]).to be_within(0.001).of(0.95) end - it 'reports collected data categories' do - expected_value = %w[standard subscription operational optional] + context 'with usage_data_instrumentation feature flag' do + context 'when enabled' do + before do + 
stub_feature_flags(usage_data_instrumentation: true) + end + + it 'reports collected data categories' do + expected_value = %w[standard subscription operational optional] + + allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance| + expect(instance).to receive(:execute).and_return(expected_value) + end + + expect(described_class.data[:settings][:collected_data_categories]).to eq(expected_value) + end - allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance| - expect(instance).to receive(:execute).and_return(expected_value) + it 'gathers service_ping_features_enabled' do + expect(described_class.data[:settings][:service_ping_features_enabled]).to eq(Gitlab::CurrentSettings.usage_ping_features_enabled) + end end - expect(subject[:settings][:collected_data_categories]).to eq(expected_value) - end + context 'when disabled' do + before do + stub_feature_flags(usage_data_instrumentation: false) + end + + it 'reports collected data categories' do + expected_value = %w[standard subscription operational optional] - it 'gathers service_ping_features_enabled' do - expect(subject[:settings][:service_ping_features_enabled]).to eq(Gitlab::CurrentSettings.usage_ping_features_enabled) + allow_next_instance_of(ServicePing::PermitDataCategoriesService) do |instance| + expect(instance).to receive(:execute).and_return(expected_value) + end + + expect(subject[:settings][:collected_data_categories]).to eq(expected_value) + end + + it 'gathers service_ping_features_enabled' do + expect(subject[:settings][:service_ping_features_enabled]).to eq(Gitlab::CurrentSettings.usage_ping_features_enabled) + end + end end it 'gathers user_cap_feature_enabled' do diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb index 1d01d5c7e6a..e721b28ac29 100644 --- a/spec/lib/gitlab/utils/usage_data_spec.rb +++ b/spec/lib/gitlab/utils/usage_data_spec.rb @@ -8,8 +8,26 @@ RSpec.describe Gitlab::Utils::UsageData do describe 
'#add_metric' do let(:metric) { 'UuidMetric'} - it 'computes the metric value for given metric' do - expect(described_class.add_metric(metric)).to eq(Gitlab::CurrentSettings.uuid) + context 'with usage_data_instrumentation feature flag' do + context 'when enabled' do + before do + stub_feature_flags(usage_data_instrumentation: true) + end + + it 'returns -100 value to be overridden' do + expect(described_class.add_metric(metric)).to eq(-100) + end + end + + context 'when disabled' do + before do + stub_feature_flags(usage_data_instrumentation: false) + end + + it 'computes the metric value for given metric' do + expect(described_class.add_metric(metric)).to eq(Gitlab::CurrentSettings.uuid) + end + end end end @@ -52,7 +70,7 @@ RSpec.describe Gitlab::Utils::UsageData do let(:relation) { double(:relation, connection: double(:connection)) } before do - allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false) # rubocop: disable Database/MultipleDatabases + allow(relation.connection).to receive(:transaction_open?).and_return(false) end it 'delegates counting to counter class instance' do @@ -104,7 +122,7 @@ RSpec.describe Gitlab::Utils::UsageData do let(:ci_builds_estimated_cardinality) { 2.0809220082170614 } before do - allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false) # rubocop: disable Database/MultipleDatabases + allow(model.connection).to receive(:transaction_open?).and_return(false) end context 'different counting parameters' do diff --git a/spec/lib/gitlab/webpack/file_loader_spec.rb b/spec/lib/gitlab/webpack/file_loader_spec.rb new file mode 100644 index 00000000000..34d00b9f106 --- /dev/null +++ b/spec/lib/gitlab/webpack/file_loader_spec.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' +require 'support/helpers/file_read_helpers' +require 'support/webmock' + +RSpec.describe Gitlab::Webpack::FileLoader do + include FileReadHelpers + include WebMock::API + 
let(:error_file_path) { "error.yml" } + let(:file_path) { "my_test_file.yml" } + let(:file_contents) do + <<-EOF + - hello + - world + - test + EOF + end + + before do + allow(Gitlab.config.webpack.dev_server).to receive_messages(host: 'hostname', port: 2000, https: false) + allow(Gitlab.config.webpack).to receive(:public_path).and_return('public_path') + allow(Gitlab.config.webpack).to receive(:output_dir).and_return('webpack_output') + end + + context "with dev server enabled" do + before do + allow(Gitlab.config.webpack.dev_server).to receive(:enabled).and_return(true) + + stub_request(:get, "http://hostname:2000/public_path/not_found").to_return(status: 404) + stub_request(:get, "http://hostname:2000/public_path/#{file_path}").to_return(body: file_contents, status: 200) + stub_request(:get, "http://hostname:2000/public_path/#{error_file_path}").to_raise(StandardError) + end + + it "returns content when responds successfully" do + expect(Gitlab::Webpack::FileLoader.load(file_path)).to be(file_contents) + end + + it "raises error when 404" do + expect { Gitlab::Webpack::FileLoader.load("not_found") }.to raise_error("HTTP error 404") + end + + it "raises error when errors out" do + expect { Gitlab::Webpack::FileLoader.load(error_file_path) }.to raise_error(Gitlab::Webpack::FileLoader::DevServerLoadError) + end + end + + context "with dev server enabled and https" do + before do + allow(Gitlab.config.webpack.dev_server).to receive(:enabled).and_return(true) + allow(Gitlab.config.webpack.dev_server).to receive(:https).and_return(true) + + stub_request(:get, "https://hostname:2000/public_path/#{error_file_path}").to_raise(EOFError) + end + + it "raises error if catches SSLError" do + expect { Gitlab::Webpack::FileLoader.load(error_file_path) }.to raise_error(Gitlab::Webpack::FileLoader::DevServerSSLError) + end + end + + context "with dev server disabled" do + before do + allow(Gitlab.config.webpack.dev_server).to receive(:enabled).and_return(false) + 
stub_file_read(::Rails.root.join("webpack_output/#{file_path}"), content: file_contents) + stub_file_read(::Rails.root.join("webpack_output/#{error_file_path}"), error: Errno::ENOENT) + end + + describe ".load" do + it "returns file content from file path" do + expect(Gitlab::Webpack::FileLoader.load(file_path)).to be(file_contents) + end + + it "throws error if file cannot be read" do + expect { Gitlab::Webpack::FileLoader.load(error_file_path) }.to raise_error(Gitlab::Webpack::FileLoader::StaticLoadError) + end + end + end +end diff --git a/spec/lib/gitlab/webpack/graphql_known_operations_spec.rb b/spec/lib/gitlab/webpack/graphql_known_operations_spec.rb new file mode 100644 index 00000000000..89cade82fe6 --- /dev/null +++ b/spec/lib/gitlab/webpack/graphql_known_operations_spec.rb @@ -0,0 +1,47 @@ +# frozen_string_literal: true + +require 'fast_spec_helper' + +RSpec.describe Gitlab::Webpack::GraphqlKnownOperations do + let(:content) do + <<-EOF + - hello + - world + - test + EOF + end + + around do |example| + described_class.clear_memoization! + + example.run + + described_class.clear_memoization! 
+ end + + describe ".load" do + context "when file loader returns" do + before do + allow(::Gitlab::Webpack::FileLoader).to receive(:load).with("graphql_known_operations.yml").and_return(content) + end + + it "returns memoized value" do + expect(::Gitlab::Webpack::FileLoader).to receive(:load).once + + 2.times { ::Gitlab::Webpack::GraphqlKnownOperations.load } + + expect(::Gitlab::Webpack::GraphqlKnownOperations.load).to eq(%w(hello world test)) + end + end + + context "when file loader errors" do + before do + allow(::Gitlab::Webpack::FileLoader).to receive(:load).and_raise(StandardError.new("test")) + end + + it "returns empty array" do + expect(::Gitlab::Webpack::GraphqlKnownOperations.load).to eq([]) + end + end + end +end diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb index 8ba56af561d..3bab9aec454 100644 --- a/spec/lib/gitlab/workhorse_spec.rb +++ b/spec/lib/gitlab/workhorse_spec.rb @@ -512,6 +512,24 @@ RSpec.describe Gitlab::Workhorse do end end + describe '.send_dependency' do + let(:headers) { { Accept: 'foo', Authorization: 'Bearer asdf1234' } } + let(:url) { 'https://foo.bar.com/baz' } + + subject { described_class.send_dependency(headers, url) } + + it 'sets the header correctly', :aggregate_failures do + key, command, params = decode_workhorse_header(subject) + + expect(key).to eq("Gitlab-Workhorse-Send-Data") + expect(command).to eq("send-dependency") + expect(params).to eq({ + 'Header' => headers, + 'Url' => url + }.deep_stringify_keys) + end + end + describe '.send_git_snapshot' do let(:url) { 'http://example.com' } diff --git a/spec/lib/gitlab/x509/certificate_spec.rb b/spec/lib/gitlab/x509/certificate_spec.rb index a5b192dd051..2dc30cc871d 100644 --- a/spec/lib/gitlab/x509/certificate_spec.rb +++ b/spec/lib/gitlab/x509/certificate_spec.rb @@ -5,6 +5,9 @@ require 'spec_helper' RSpec.describe Gitlab::X509::Certificate do include SmimeHelper + let(:sample_ca_certs_path) { 
Rails.root.join('spec/fixtures/clusters').to_s } + let(:sample_cert) { Rails.root.join('spec/fixtures/x509_certificate.crt').to_s } + # cert generation is an expensive operation and they are used read-only, # so we share them as instance variables in all tests before :context do @@ -13,6 +16,16 @@ RSpec.describe Gitlab::X509::Certificate do @cert = generate_cert(signer_ca: @intermediate_ca) end + before do + stub_const("OpenSSL::X509::DEFAULT_CERT_DIR", sample_ca_certs_path) + stub_const("OpenSSL::X509::DEFAULT_CERT_FILE", sample_cert) + described_class.reset_ca_certs_bundle + end + + after(:context) do + described_class.reset_ca_certs_bundle + end + describe 'testing environment setup' do describe 'generate_root' do subject { @root_ca } @@ -103,6 +116,43 @@ RSpec.describe Gitlab::X509::Certificate do end end + describe '.ca_certs_paths' do + it 'returns all files specified by OpenSSL defaults' do + cert_paths = Dir["#{OpenSSL::X509::DEFAULT_CERT_DIR}/*"] + + expect(described_class.ca_certs_paths).to match_array(cert_paths + [sample_cert]) + end + end + + describe '.ca_certs_bundle' do + it 'skips certificates if OpenSSLError is raised and report it' do + expect(Gitlab::ErrorTracking) + .to receive(:track_and_raise_for_dev_exception) + .with( + a_kind_of(OpenSSL::X509::CertificateError), + cert_file: a_kind_of(String)).at_least(:once) + + expect(OpenSSL::X509::Certificate) + .to receive(:new) + .and_raise(OpenSSL::X509::CertificateError).at_least(:once) + + expect(described_class.ca_certs_bundle).to be_a(String) + end + + it 'returns a list certificates as strings' do + expect(described_class.ca_certs_bundle).to be_a(String) + end + end + + describe '.load_ca_certs_bundle' do + it 'loads a PEM-encoded certificate bundle into an OpenSSL::X509::Certificate array' do + ca_certs_string = described_class.ca_certs_bundle + ca_certs = described_class.load_ca_certs_bundle(ca_certs_string) + + expect(ca_certs).to all(be_an(OpenSSL::X509::Certificate)) + end + end + def 
common_cert_tests(parsed_cert, cert, signer_ca, with_ca_certs: nil) expect(parsed_cert.cert).to be_a(OpenSSL::X509::Certificate) expect(parsed_cert.cert.subject).to eq(cert[:cert].subject) diff --git a/spec/lib/gitlab/x509/signature_spec.rb b/spec/lib/gitlab/x509/signature_spec.rb index 7ba15faf910..0e34d5393d6 100644 --- a/spec/lib/gitlab/x509/signature_spec.rb +++ b/spec/lib/gitlab/x509/signature_spec.rb @@ -12,7 +12,7 @@ RSpec.describe Gitlab::X509::Signature do end shared_examples "a verified signature" do - let_it_be(:user) { create(:user, email: X509Helpers::User1.certificate_email) } + let!(:user) { create(:user, email: X509Helpers::User1.certificate_email) } subject(:signature) do described_class.new( @@ -30,10 +30,12 @@ RSpec.describe Gitlab::X509::Signature do expect(signature.verification_status).to eq(:verified) end - it "returns an unverified signature if the email matches but isn't confirmed" do - user.update!(confirmed_at: nil) + context "if the email matches but isn't confirmed" do + let!(:user) { create(:user, :unconfirmed, email: X509Helpers::User1.certificate_email) } - expect(signature.verification_status).to eq(:unverified) + it "returns an unverified signature" do + expect(signature.verification_status).to eq(:unverified) + end end it 'returns an unverified signature if email does not match' do @@ -297,7 +299,7 @@ RSpec.describe Gitlab::X509::Signature do end context 'verified signature' do - let_it_be(:user) { create(:user, email: X509Helpers::User1.certificate_email) } + let_it_be(:user) { create(:user, :unconfirmed, email: X509Helpers::User1.certificate_email) } subject(:signature) do described_class.new( @@ -316,52 +318,56 @@ RSpec.describe Gitlab::X509::Signature do allow(OpenSSL::X509::Store).to receive(:new).and_return(store) end - it 'returns a verified signature if email does match' do - expect(signature.x509_certificate).to have_attributes(certificate_attributes) - expect(signature.x509_certificate.x509_issuer).to 
have_attributes(issuer_attributes) - expect(signature.verified_signature).to be_truthy - expect(signature.verification_status).to eq(:verified) - end + context 'when user email is confirmed' do + before_all do + user.confirm + end - it "returns an unverified signature if the email matches but isn't confirmed" do - user.update!(confirmed_at: nil) + it 'returns a verified signature if email does match', :aggregate_failures do + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_truthy + expect(signature.verification_status).to eq(:verified) + end - expect(signature.verification_status).to eq(:unverified) - end + it 'returns an unverified signature if email does not match', :aggregate_failures do + signature = described_class.new( + X509Helpers::User1.signed_tag_signature, + X509Helpers::User1.signed_tag_base_data, + "gitlab@example.com", + X509Helpers::User1.signed_commit_time + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_truthy + expect(signature.verification_status).to eq(:unverified) + end - it 'returns an unverified signature if email does match and time is wrong' do - signature = described_class.new( - X509Helpers::User1.signed_tag_signature, - X509Helpers::User1.signed_tag_base_data, - X509Helpers::User1.certificate_email, - Time.new(2020, 2, 22) - ) + it 'returns an unverified signature if email does match and time is wrong', :aggregate_failures do + signature = described_class.new( + X509Helpers::User1.signed_tag_signature, + X509Helpers::User1.signed_tag_base_data, + X509Helpers::User1.certificate_email, + Time.new(2020, 2, 22) + ) + + expect(signature.x509_certificate).to have_attributes(certificate_attributes) + 
expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) + expect(signature.verified_signature).to be_falsey + expect(signature.verification_status).to eq(:unverified) + end - expect(signature.x509_certificate).to have_attributes(certificate_attributes) - expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) - expect(signature.verified_signature).to be_truthy - expect(signature.verification_status).to eq(:unverified) - end + it 'returns an unverified signature if certificate is revoked' do + expect(signature.verification_status).to eq(:verified) - it 'returns an unverified signature if email does match and time is wrong' do - signature = described_class.new( - X509Helpers::User1.signed_tag_signature, - X509Helpers::User1.signed_tag_base_data, - X509Helpers::User1.certificate_email, - Time.new(2020, 2, 22) - ) + signature.x509_certificate.revoked! - expect(signature.x509_certificate).to have_attributes(certificate_attributes) - expect(signature.x509_certificate.x509_issuer).to have_attributes(issuer_attributes) - expect(signature.verified_signature).to be_falsey - expect(signature.verification_status).to eq(:unverified) + expect(signature.verification_status).to eq(:unverified) + end end - it 'returns an unverified signature if certificate is revoked' do - expect(signature.verification_status).to eq(:verified) - - signature.x509_certificate.revoked! 
- + it 'returns an unverified signature if the email matches but is not confirmed' do expect(signature.verification_status).to eq(:unverified) end end diff --git a/spec/lib/gitlab/zentao/client_spec.rb b/spec/lib/gitlab/zentao/client_spec.rb index e3a335c1e89..86b310fe417 100644 --- a/spec/lib/gitlab/zentao/client_spec.rb +++ b/spec/lib/gitlab/zentao/client_spec.rb @@ -6,7 +6,23 @@ RSpec.describe Gitlab::Zentao::Client do subject(:integration) { described_class.new(zentao_integration) } let(:zentao_integration) { create(:zentao_integration) } - let(:mock_get_products_url) { integration.send(:url, "products/#{zentao_integration.zentao_product_xid}") } + + def mock_get_products_url + integration.send(:url, "products/#{zentao_integration.zentao_product_xid}") + end + + def mock_fetch_issue_url(issue_id) + integration.send(:url, "issues/#{issue_id}") + end + + let(:mock_headers) do + { + headers: { + 'Content-Type' => 'application/json', + 'Token' => zentao_integration.api_token + } + } + end describe '#new' do context 'if integration is nil' do @@ -25,15 +41,6 @@ RSpec.describe Gitlab::Zentao::Client do end describe '#fetch_product' do - let(:mock_headers) do - { - headers: { - 'Content-Type' => 'application/json', - 'Token' => zentao_integration.api_token - } - } - end - context 'with valid product' do let(:mock_response) { { 'id' => zentao_integration.zentao_product_xid } } @@ -54,7 +61,9 @@ RSpec.describe Gitlab::Zentao::Client do end it 'fetches the empty product' do - expect(integration.fetch_product(zentao_integration.zentao_product_xid)).to eq({}) + expect do + integration.fetch_product(zentao_integration.zentao_product_xid) + end.to raise_error(Gitlab::Zentao::Client::Error, 'request error') end end @@ -65,21 +74,14 @@ RSpec.describe Gitlab::Zentao::Client do end it 'fetches the empty product' do - expect(integration.fetch_product(zentao_integration.zentao_product_xid)).to eq({}) + expect do + integration.fetch_product(zentao_integration.zentao_product_xid) + 
end.to raise_error(Gitlab::Zentao::Client::Error, 'invalid response format') end end end describe '#ping' do - let(:mock_headers) do - { - headers: { - 'Content-Type' => 'application/json', - 'Token' => zentao_integration.api_token - } - } - end - context 'with valid resource' do before do WebMock.stub_request(:get, mock_get_products_url) @@ -102,4 +104,30 @@ RSpec.describe Gitlab::Zentao::Client do end end end + + describe '#fetch_issue' do + context 'with invalid id' do + let(:invalid_ids) { ['story', 'story-', '-', '123', ''] } + + it 'returns empty object' do + invalid_ids.each do |id| + expect { integration.fetch_issue(id) } + .to raise_error(Gitlab::Zentao::Client::Error, 'invalid issue id') + end + end + end + + context 'with valid id' do + let(:valid_ids) { %w[story-1 bug-23] } + + it 'fetches current issue' do + valid_ids.each do |id| + WebMock.stub_request(:get, mock_fetch_issue_url(id)) + .with(mock_headers).to_return(status: 200, body: { issue: { id: id } }.to_json) + + expect(integration.fetch_issue(id).dig('issue', 'id')).to eq id + end + end + end + end end diff --git a/spec/lib/gitlab/zentao/query_spec.rb b/spec/lib/gitlab/zentao/query_spec.rb new file mode 100644 index 00000000000..f7495e640c3 --- /dev/null +++ b/spec/lib/gitlab/zentao/query_spec.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Gitlab::Zentao::Query do + let(:zentao_integration) { create(:zentao_integration) } + let(:params) { {} } + + subject(:query) { described_class.new(zentao_integration, ActionController::Parameters.new(params)) } + + describe '#issues' do + let(:response) { { 'page' => 1, 'total' => 0, 'limit' => 20, 'issues' => [] } } + + def expect_query_option_include(expected_params) + expect_next_instance_of(Gitlab::Zentao::Client) do |client| + expect(client).to receive(:fetch_issues) + .with(hash_including(expected_params)) + .and_return(response) + end + + query.issues + end + + context 'when params are empty' do + it 
'fills default params' do + expect_query_option_include(status: 'opened', order: 'lastEditedDate_desc', labels: '') + end + end + + context 'when params contain valid options' do + let(:params) { { state: 'closed', sort: 'created_asc', labels: %w[Bugs Features] } } + + it 'fills params with standard of ZenTao' do + expect_query_option_include(status: 'closed', order: 'openedDate_asc', labels: 'Bugs,Features') + end + end + + context 'when params contain invalid options' do + let(:params) { { state: 'xxx', sort: 'xxx', labels: %w[xxx] } } + + it 'fills default params with standard of ZenTao' do + expect_query_option_include(status: 'opened', order: 'lastEditedDate_desc', labels: 'xxx') + end + end + end + + describe '#issue' do + let(:response) { { 'issue' => { 'id' => 'story-1' } } } + + before do + expect_next_instance_of(Gitlab::Zentao::Client) do |client| + expect(client).to receive(:fetch_issue) + .and_return(response) + end + end + + it 'returns issue object by client' do + expect(query.issue).to include('id' => 'story-1') + end + end +end diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb index 3f39d969dbd..53048ae2e6b 100644 --- a/spec/lib/marginalia_spec.rb +++ b/spec/lib/marginalia_spec.rb @@ -59,14 +59,14 @@ RSpec.describe 'Marginalia spec' do "application" => "test", "endpoint_id" => "MarginaliaTestController#first_user", "correlation_id" => correlation_id, - "db_config_name" => "ci" + "db_config_name" => ENV['GITLAB_LOAD_BALANCING_REUSE_PRIMARY_ci'] == 'main' ? 
'main' : 'ci' } end - before do |example| + before do skip_if_multiple_databases_not_setup - allow(User).to receive(:connection) { Ci::CiDatabaseRecord.connection } + allow(User).to receive(:connection) { Ci::ApplicationRecord.connection } end it 'generates a query that includes the component and value' do diff --git a/spec/lib/object_storage/config_spec.rb b/spec/lib/object_storage/config_spec.rb index 21b8a44b3d6..9a0e83bfd5e 100644 --- a/spec/lib/object_storage/config_spec.rb +++ b/spec/lib/object_storage/config_spec.rb @@ -36,46 +36,6 @@ RSpec.describe ObjectStorage::Config do subject { described_class.new(raw_config.as_json) } - describe '#load_provider' do - before do - subject.load_provider - end - - context 'with AWS' do - it 'registers AWS as a provider' do - expect(Fog.providers.keys).to include(:aws) - end - end - - context 'with Google' do - let(:credentials) do - { - provider: 'Google', - google_storage_access_key_id: 'GOOGLE_ACCESS_KEY_ID', - google_storage_secret_access_key: 'GOOGLE_SECRET_ACCESS_KEY' - } - end - - it 'registers Google as a provider' do - expect(Fog.providers.keys).to include(:google) - end - end - - context 'with Azure' do - let(:credentials) do - { - provider: 'AzureRM', - azure_storage_account_name: 'azuretest', - azure_storage_access_key: 'ABCD1234' - } - end - - it 'registers AzureRM as a provider' do - expect(Fog.providers.keys).to include(:azurerm) - end - end - end - describe '#credentials' do it { expect(subject.credentials).to eq(credentials) } end diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb index 006f4f603b6..1629aec89f5 100644 --- a/spec/lib/object_storage/direct_upload_spec.rb +++ b/spec/lib/object_storage/direct_upload_spec.rb @@ -201,10 +201,6 @@ RSpec.describe ObjectStorage::DirectUpload do end shared_examples 'a valid AzureRM upload' do - before do - require 'fog/azurerm' - end - it_behaves_like 'a valid upload' it 'enables the Workhorse client' do diff 
--git a/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb b/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb new file mode 100644 index 00000000000..ecd1602dd9e --- /dev/null +++ b/spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb @@ -0,0 +1,163 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Security::CiConfiguration::SastIacBuildAction do + subject(:result) { described_class.new(auto_devops_enabled, gitlab_ci_content).generate } + + let(:params) { {} } + + context 'with existing .gitlab-ci.yml' do + let(:auto_devops_enabled) { false } + + context 'sast iac has not been included' do + let(:expected_yml) do + <<-CI_YML.strip_heredoc + # You can override the included template(s) by including variable overrides + # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings + # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings + # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings + # Note that environment variables can be set in several places + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence + stages: + - test + - security + variables: + RANDOM: make sure this persists + include: + - template: existing.yml + - template: Security/SAST-IaC.latest.gitlab-ci.yml + CI_YML + end + + context 'template includes are an array' do + let(:gitlab_ci_content) do + { "stages" => %w(test security), + "variables" => { "RANDOM" => "make sure this persists" }, + "include" => [{ "template" => "existing.yml" }] } + end + + it 'generates the correct YML' do + expect(result[:action]).to eq('update') + expect(result[:content]).to eq(expected_yml) + end + end + + context 'template include is not an array' do + let(:gitlab_ci_content) do + { "stages" => %w(test security), 
+ "variables" => { "RANDOM" => "make sure this persists" }, + "include" => { "template" => "existing.yml" } } + end + + it 'generates the correct YML' do + expect(result[:action]).to eq('update') + expect(result[:content]).to eq(expected_yml) + end + end + end + + context 'secret_detection has been included' do + let(:expected_yml) do + <<-CI_YML.strip_heredoc + # You can override the included template(s) by including variable overrides + # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings + # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings + # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings + # Note that environment variables can be set in several places + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence + stages: + - test + variables: + RANDOM: make sure this persists + include: + - template: Security/SAST-IaC.latest.gitlab-ci.yml + CI_YML + end + + context 'secret_detection template include are an array' do + let(:gitlab_ci_content) do + { "stages" => %w(test), + "variables" => { "RANDOM" => "make sure this persists" }, + "include" => [{ "template" => "Security/SAST-IaC.latest.gitlab-ci.yml" }] } + end + + it 'generates the correct YML' do + expect(result[:action]).to eq('update') + expect(result[:content]).to eq(expected_yml) + end + end + + context 'secret_detection template include is not an array' do + let(:gitlab_ci_content) do + { "stages" => %w(test), + "variables" => { "RANDOM" => "make sure this persists" }, + "include" => { "template" => "Security/SAST-IaC.latest.gitlab-ci.yml" } } + end + + it 'generates the correct YML' do + expect(result[:action]).to eq('update') + expect(result[:content]).to eq(expected_yml) + end + end + end + end + + context 'with no .gitlab-ci.yml' do + 
let(:gitlab_ci_content) { nil } + + context 'autodevops disabled' do + let(:auto_devops_enabled) { false } + let(:expected_yml) do + <<-CI_YML.strip_heredoc + # You can override the included template(s) by including variable overrides + # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings + # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings + # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings + # Note that environment variables can be set in several places + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence + include: + - template: Security/SAST-IaC.latest.gitlab-ci.yml + CI_YML + end + + it 'generates the correct YML' do + expect(result[:action]).to eq('create') + expect(result[:content]).to eq(expected_yml) + end + end + + context 'with autodevops enabled' do + let(:auto_devops_enabled) { true } + let(:expected_yml) do + <<-CI_YML.strip_heredoc + # You can override the included template(s) by including variable overrides + # SAST customization: https://docs.gitlab.com/ee/user/application_security/sast/#customizing-the-sast-settings + # Secret Detection customization: https://docs.gitlab.com/ee/user/application_security/secret_detection/#customizing-settings + # Dependency Scanning customization: https://docs.gitlab.com/ee/user/application_security/dependency_scanning/#customizing-the-dependency-scanning-settings + # Note that environment variables can be set in several places + # See https://docs.gitlab.com/ee/ci/variables/#cicd-variable-precedence + include: + - template: Auto-DevOps.gitlab-ci.yml + CI_YML + end + + before do + allow_next_instance_of(described_class) do |sast_iac_build_actions| + allow(sast_iac_build_actions).to receive(:auto_devops_stages).and_return(fast_auto_devops_stages) + end + end + 
+ it 'generates the correct YML' do + expect(result[:action]).to eq('create') + expect(result[:content]).to eq(expected_yml) + end + end + end + + # stubbing this method allows this spec file to use fast_spec_helper + def fast_auto_devops_stages + auto_devops_template = YAML.safe_load( File.read('lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml') ) + auto_devops_template['stages'] + end +end diff --git a/spec/lib/sidebars/groups/menus/invite_team_members_menu_spec.rb b/spec/lib/sidebars/groups/menus/invite_team_members_menu_spec.rb new file mode 100644 index 00000000000..a79e5182f45 --- /dev/null +++ b/spec/lib/sidebars/groups/menus/invite_team_members_menu_spec.rb @@ -0,0 +1,55 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Groups::Menus::InviteTeamMembersMenu do + let_it_be(:owner) { create(:user) } + let_it_be(:guest) { create(:user) } + let_it_be(:group) do + build(:group).tap do |g| + g.add_owner(owner) + end + end + + let(:context) { Sidebars::Groups::Context.new(current_user: owner, container: group) } + + subject(:invite_menu) { described_class.new(context) } + + context 'when the group is viewed by an owner of the group' do + describe '#render?' do + it 'renders the Invite team members link' do + expect(invite_menu.render?).to eq(true) + end + + context 'when the group already has at least 2 members' do + before do + group.add_guest(guest) + end + + it 'does not render the link' do + expect(invite_menu.render?).to eq(false) + end + end + end + + describe '#title' do + it 'displays the correct Invite team members text for the link in the side nav' do + expect(invite_menu.title).to eq('Invite members') + end + end + end + + context 'when the group is viewed by a guest user without admin permissions' do + let(:context) { Sidebars::Groups::Context.new(current_user: guest, container: group) } + + before do + group.add_guest(guest) + end + + describe '#render?' 
do + it 'does not render the link' do + expect(subject.render?).to eq(false) + end + end + end +end diff --git a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb index 5ebd67462f8..e954d7a44ba 100644 --- a/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb +++ b/spec/lib/sidebars/groups/menus/packages_registries_menu_spec.rb @@ -137,16 +137,27 @@ RSpec.describe Sidebars::Groups::Menus::PackagesRegistriesMenu do stub_config(dependency_proxy: { enabled: dependency_enabled }) end - context 'when config dependency_proxy is enabled' do - let(:dependency_enabled) { true } + context 'when user can read dependency proxy' do + context 'when config dependency_proxy is enabled' do + let(:dependency_enabled) { true } - it 'the menu item is added to list of menu items' do - is_expected.not_to be_nil + it 'the menu item is added to list of menu items' do + is_expected.not_to be_nil + end + end + + context 'when config dependency_proxy is not enabled' do + let(:dependency_enabled) { false } + + it 'the menu item is not added to list of menu items' do + is_expected.to be_nil + end end end - context 'when config dependency_proxy is not enabled' do - let(:dependency_enabled) { false } + context 'when user cannot read dependency proxy' do + let(:user) { nil } + let(:dependency_enabled) { true } it 'the menu item is not added to list of menu items' do is_expected.to be_nil diff --git a/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb index 2415598da9c..55281171634 100644 --- a/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/infrastructure_menu_spec.rb @@ -51,6 +51,16 @@ RSpec.describe Sidebars::Projects::Menus::InfrastructureMenu do it 'menu link points to Terraform page' do expect(subject.link).to eq find_menu_item(:terraform).link end + + context 'when Terraform 
menu is not visible' do + before do + subject.renderable_items.delete(find_menu_item(:terraform)) + end + + it 'menu link points to Google Cloud page' do + expect(subject.link).to eq find_menu_item(:google_cloud).link + end + end end end @@ -89,5 +99,11 @@ RSpec.describe Sidebars::Projects::Menus::InfrastructureMenu do it_behaves_like 'access rights checks' end + + describe 'Google Cloud' do + let(:item_id) { :google_cloud } + + it_behaves_like 'access rights checks' + end end end diff --git a/spec/lib/sidebars/projects/menus/invite_team_members_menu_spec.rb b/spec/lib/sidebars/projects/menus/invite_team_members_menu_spec.rb new file mode 100644 index 00000000000..df9b260d211 --- /dev/null +++ b/spec/lib/sidebars/projects/menus/invite_team_members_menu_spec.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Projects::Menus::InviteTeamMembersMenu do + let_it_be(:project) { create(:project) } + let_it_be(:guest) { create(:user) } + + let(:context) { Sidebars::Projects::Context.new(current_user: owner, container: project) } + + subject(:invite_menu) { described_class.new(context) } + + context 'when the project is viewed by an owner of the group' do + let(:owner) { project.owner } + + describe '#render?' 
do + it 'renders the Invite team members link' do + expect(invite_menu.render?).to eq(true) + end + + context 'when the project already has at least 2 members' do + before do + project.add_guest(guest) + end + + it 'does not render the link' do + expect(invite_menu.render?).to eq(false) + end + end + end + + describe '#title' do + it 'displays the correct Invite team members text for the link in the side nav' do + expect(invite_menu.title).to eq('Invite members') + end + end + end + + context 'when the project is viewed by a guest user without admin permissions' do + let(:context) { Sidebars::Projects::Context.new(current_user: guest, container: project) } + + before do + project.add_guest(guest) + end + + describe '#render?' do + it 'does not render' do + expect(invite_menu.render?).to eq(false) + end + end + end +end diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb index 3079c781d73..1e5d41dfec4 100644 --- a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb +++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb @@ -162,24 +162,10 @@ RSpec.describe Sidebars::Projects::Menus::SettingsMenu do describe 'Usage Quotas' do let(:item_id) { :usage_quotas } - describe 'with project_storage_ui feature flag enabled' do - before do - stub_feature_flags(project_storage_ui: true) - end - - specify { is_expected.not_to be_nil } - - describe 'when the user does not have access' do - let(:user) { nil } - - specify { is_expected.to be_nil } - end - end + specify { is_expected.not_to be_nil } - describe 'with project_storage_ui feature flag disabled' do - before do - stub_feature_flags(project_storage_ui: false) - end + describe 'when the user does not have access' do + let(:user) { nil } specify { is_expected.to be_nil } end diff --git a/spec/lib/sidebars/projects/menus/zentao_menu_spec.rb b/spec/lib/sidebars/projects/menus/zentao_menu_spec.rb new file mode 100644 index 00000000000..f0bce6b7ea5 
--- /dev/null +++ b/spec/lib/sidebars/projects/menus/zentao_menu_spec.rb @@ -0,0 +1,7 @@ +# frozen_string_literal: true + +require 'spec_helper' + +RSpec.describe Sidebars::Projects::Menus::ZentaoMenu do + it_behaves_like 'ZenTao menu with CE version' +end diff --git a/spec/lib/system_check/incoming_email_check_spec.rb b/spec/lib/system_check/incoming_email_check_spec.rb index 710702b93fc..5d93b810045 100644 --- a/spec/lib/system_check/incoming_email_check_spec.rb +++ b/spec/lib/system_check/incoming_email_check_spec.rb @@ -28,7 +28,7 @@ RSpec.describe SystemCheck::IncomingEmailCheck do it 'runs IMAP and mailroom checks' do expect(SystemCheck).to receive(:run).with('Reply by email', [ SystemCheck::IncomingEmail::ImapAuthenticationCheck, - SystemCheck::IncomingEmail::InitdConfiguredCheck, + SystemCheck::IncomingEmail::MailRoomEnabledCheck, SystemCheck::IncomingEmail::MailRoomRunningCheck ]) @@ -43,7 +43,7 @@ RSpec.describe SystemCheck::IncomingEmailCheck do it 'runs mailroom checks' do expect(SystemCheck).to receive(:run).with('Reply by email', [ - SystemCheck::IncomingEmail::InitdConfiguredCheck, + SystemCheck::IncomingEmail::MailRoomEnabledCheck, SystemCheck::IncomingEmail::MailRoomRunningCheck ]) diff --git a/spec/lib/uploaded_file_spec.rb b/spec/lib/uploaded_file_spec.rb index ececc84bc93..0aba6cb0065 100644 --- a/spec/lib/uploaded_file_spec.rb +++ b/spec/lib/uploaded_file_spec.rb @@ -15,7 +15,7 @@ RSpec.describe UploadedFile do end context 'from_params functions' do - RSpec.shared_examples 'using the file path' do |filename:, content_type:, sha256:, path_suffix:| + RSpec.shared_examples 'using the file path' do |filename:, content_type:, sha256:, path_suffix:, upload_duration:| it { is_expected.not_to be_nil } it 'sets properly the attributes' do @@ -24,6 +24,7 @@ RSpec.describe UploadedFile do expect(subject.sha256).to eq(sha256) expect(subject.remote_id).to be_nil expect(subject.path).to end_with(path_suffix) + expect(subject.upload_duration).to 
eq(upload_duration) end it 'handles a blank path' do @@ -37,16 +38,17 @@ RSpec.describe UploadedFile do end end - RSpec.shared_examples 'using the remote id' do |filename:, content_type:, sha256:, size:, remote_id:| + RSpec.shared_examples 'using the remote id' do |filename:, content_type:, sha256:, size:, remote_id:, upload_duration:| it { is_expected.not_to be_nil } it 'sets properly the attributes' do expect(subject.original_filename).to eq(filename) - expect(subject.content_type).to eq('application/octet-stream') - expect(subject.sha256).to eq('sha256') + expect(subject.content_type).to eq(content_type) + expect(subject.sha256).to eq(sha256) expect(subject.path).to be_nil - expect(subject.size).to eq(123456) - expect(subject.remote_id).to eq('1234567890') + expect(subject.size).to eq(size) + expect(subject.remote_id).to eq(remote_id) + expect(subject.upload_duration).to eq(upload_duration) end end @@ -78,6 +80,7 @@ RSpec.describe UploadedFile do { 'path' => temp_file.path, 'name' => 'dir/my file&.txt', 'type' => 'my/type', + 'upload_duration' => '5.05', 'sha256' => 'sha256' } end @@ -85,7 +88,8 @@ RSpec.describe UploadedFile do filename: 'my_file_.txt', content_type: 'my/type', sha256: 'sha256', - path_suffix: 'test' + path_suffix: 'test', + upload_duration: 5.05 end context 'with a remote id' do @@ -96,6 +100,7 @@ RSpec.describe UploadedFile do 'remote_url' => 'http://localhost/file', 'remote_id' => '1234567890', 'etag' => 'etag1234567890', + 'upload_duration' => '5.05', 'size' => '123456' } end @@ -105,7 +110,8 @@ RSpec.describe UploadedFile do content_type: 'application/octet-stream', sha256: 'sha256', size: 123456, - remote_id: '1234567890' + remote_id: '1234567890', + upload_duration: 5.05 end context 'with a path and a remote id' do @@ -117,6 +123,7 @@ RSpec.describe UploadedFile do 'remote_url' => 'http://localhost/file', 'remote_id' => '1234567890', 'etag' => 'etag1234567890', + 'upload_duration' => '5.05', 'size' => '123456' } end @@ -126,7 +133,8 @@ 
RSpec.describe UploadedFile do content_type: 'application/octet-stream', sha256: 'sha256', size: 123456, - remote_id: '1234567890' + remote_id: '1234567890', + upload_duration: 5.05 end end end @@ -216,6 +224,44 @@ RSpec.describe UploadedFile do end.to raise_error(UploadedFile::UnknownSizeError, 'Unable to determine file size') end end + + context 'when upload_duration is not provided' do + it 'sets upload_duration to zero' do + file = described_class.new(temp_file.path) + + expect(file.upload_duration).to be_zero + end + end + + context 'when upload_duration is provided' do + let(:file) { described_class.new(temp_file.path, upload_duration: duration) } + + context 'and upload_duration is a number' do + let(:duration) { 5.505 } + + it 'sets the upload_duration' do + expect(file.upload_duration).to eq(duration) + end + end + + context 'and upload_duration is a string' do + context 'and represents a number' do + let(:duration) { '5.505' } + + it 'converts upload_duration to a number' do + expect(file.upload_duration).to eq(duration.to_f) + end + end + + context 'and does not represent a number' do + let(:duration) { 'not a number' } + + it 'sets upload_duration to zero' do + expect(file.upload_duration).to be_zero + end + end + end + end end describe '#sanitize_filename' do |