# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Ci::ExpirePipelineCacheService do
  let_it_be(:user) { create(:user) }
  let_it_be(:project) { create(:project) }
  let_it_be(:pipeline) { create(:ci_pipeline, project: project) }

  subject { described_class.new }

  describe '#execute' do
    it 'invalidates Etag caching for project pipeline paths' do
      pipelines_path = "/#{project.full_path}/-/pipelines.json"
      new_mr_pipelines_path = "/#{project.full_path}/-/merge_requests/new.json"
      pipeline_path = "/#{project.full_path}/-/pipelines/#{pipeline.id}.json"
      graphql_pipeline_path = "/api/graphql:pipelines/id/#{pipeline.id}"
      graphql_pipeline_sha_path = "/api/graphql:pipelines/sha/#{pipeline.sha}"
      graphql_project_on_demand_scan_counts_path = "/api/graphql:on_demand_scan/counts/#{project.full_path}"

      expect_touched_etag_caching_paths(
        pipelines_path,
        new_mr_pipelines_path,
        pipeline_path,
        graphql_pipeline_path,
        graphql_pipeline_sha_path,
        graphql_project_on_demand_scan_counts_path
      )

      subject.execute(pipeline)
    end

    it 'invalidates Etag caching for merge request pipelines if the pipeline runs on any commit of the source branch' do
      merge_request = create(:merge_request, :with_detached_merge_request_pipeline)
      project = merge_request.target_project

      merge_request_pipelines_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/pipelines.json"
      merge_request_widget_path = "/#{project.full_path}/-/merge_requests/#{merge_request.iid}/cached_widget.json"

      expect_touched_etag_caching_paths(
        merge_request_pipelines_path,
        merge_request_widget_path
      )

      subject.execute(merge_request.all_pipelines.last)
    end

    it 'updates the cached status for a project' do
      expect(Gitlab::Cache::Ci::ProjectPipelineStatus).to receive(:update_for_pipeline).with(pipeline)

      subject.execute(pipeline)
    end

    context 'destroyed pipeline' do
      let(:project_with_repo) { create(:project, :repository) }
      let!(:pipeline_with_commit) { create(:ci_pipeline, :success, project: project_with_repo, sha: project_with_repo.commit.id) }

      it 'clears the cache', :use_clean_rails_redis_caching do
        create(:commit_status, :success, pipeline: pipeline_with_commit, ref: pipeline_with_commit.ref)

        # Sanity check
        expect(project_with_repo.pipeline_status.has_status?).to be_truthy

        subject.execute(pipeline_with_commit, delete: true)

        pipeline_with_commit.destroy!

        # We need to reset lazy_latest_pipeline cache to simulate a new request
        BatchLoader::Executor.clear_current

        # Need to use find to avoid memoization
        expect(Project.find(project_with_repo.id).pipeline_status.has_status?).to be_falsey
      end
    end

    context 'when the pipeline is triggered by another pipeline' do
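      # In a ci_sources_pipeline record, `pipeline` is the downstream pipeline and
      # `source_pipeline`/`source_job` is the upstream side, so here the subject
      # pipeline is the one being triggered.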
      let(:source) { create(:ci_sources_pipeline, pipeline: pipeline) }

      it 'updates the cache of the upstream pipeline' do
        dependent_pipeline_path = "/#{source.source_project.full_path}/-/pipelines/#{source.source_pipeline.id}.json"

        expect_touched_etag_caching_paths(dependent_pipeline_path)

        subject.execute(pipeline)
      end
    end

    context 'when the pipeline triggered another pipeline' do
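      # Here the subject pipeline owns the triggering job, so it is the upstream
      # side and the downstream pipeline's cache entry should be touched.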
      let(:build) { create(:ci_build, pipeline: pipeline) }
      let(:source) { create(:ci_sources_pipeline, source_job: build) }

      it 'updates the cache of the downstream pipeline' do
        dependent_pipeline_path = "/#{source.project.full_path}/-/pipelines/#{source.pipeline.id}.json"

        expect_touched_etag_caching_paths(dependent_pipeline_path)

        subject.execute(pipeline)
      end
    end

    it 'does not do N+1 queries' do
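      # Execute once up front so the control query count below is recorded
      # against a warmed-up state.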
      subject.execute(pipeline)

      control = ActiveRecord::QueryRecorder.new { subject.execute(pipeline) }

      create(:ci_sources_pipeline, pipeline: pipeline)
      create(:ci_sources_pipeline, source_job: create(:ci_build, pipeline: pipeline))

      expect { subject.execute(pipeline) }.not_to exceed_query_limit(control.count)
    end
  end

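  # Expects the next Gitlab::EtagCaching::Store instance to receive #touch with
  # (at least) the given paths, while still calling through to the real method.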
  def expect_touched_etag_caching_paths(*paths)
    expect_next_instance_of(Gitlab::EtagCaching::Store) do |store|
      expect(store).to receive(:touch).and_wrap_original do |m, *args|
        expect(args).to include(*paths)

        m.call(*args)
      end
    end
  end
end