Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
Diffstat (limited to 'spec/scripts')
-rw-r--r--spec/scripts/database/schema_validator_spec.rb67
-rw-r--r--spec/scripts/generate_rspec_pipeline_spec.rb198
-rw-r--r--spec/scripts/lib/glfm/shared_spec.rb3
-rw-r--r--spec/scripts/pipeline/create_test_failure_issues_spec.rb145
-rw-r--r--spec/scripts/pipeline_test_report_builder_spec.rb5
-rw-r--r--spec/scripts/review_apps/automated_cleanup_spec.rb261
6 files changed, 678 insertions, 1 deletions
diff --git a/spec/scripts/database/schema_validator_spec.rb b/spec/scripts/database/schema_validator_spec.rb
new file mode 100644
index 00000000000..13be8e291da
--- /dev/null
+++ b/spec/scripts/database/schema_validator_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require_relative '../../../scripts/database/schema_validator'
+
+RSpec.describe SchemaValidator, feature_category: :database do
+ subject(:validator) { described_class.new }
+
+ describe "#validate!" do
+ before do
+ allow(validator).to receive(:committed_migrations).and_return(committed_migrations)
+ allow(validator).to receive(:run).and_return(schema_changes)
+ end
+
+ context 'when schema changes are introduced without migrations' do
+ let(:committed_migrations) { [] }
+ let(:schema_changes) { 'db/structure.sql' }
+
+ it 'terminates the execution' do
+ expect { validator.validate! }.to raise_error(SystemExit)
+ end
+ end
+
+ context 'when schema changes are introduced with migrations' do
+ let(:committed_migrations) { ['20211006103122_my_migration.rb'] }
+ let(:schema_changes) { 'db/structure.sql' }
+ let(:command) { 'git diff db/structure.sql -- db/structure.sql' }
+ let(:base_message) { 'db/structure.sql was changed, and no migrations were added' }
+
+ before do
+ allow(validator).to receive(:die)
+ end
+
+ it 'skips schema validations' do
+ expect(validator.validate!).to be_nil
+ end
+ end
+
+ context 'when skipping validations through ENV variable' do
+ let(:committed_migrations) { [] }
+ let(:schema_changes) { 'db/structure.sql' }
+
+ before do
+ stub_env('ALLOW_SCHEMA_CHANGES', true)
+ end
+
+ it 'skips schema validations' do
+ expect(validator.validate!).to be_nil
+ end
+ end
+
+ context 'when skipping validations through commit message' do
+ let(:committed_migrations) { [] }
+ let(:schema_changes) { 'db/structure.sql' }
+ let(:commit_message) { "Changes db/structure.sql file\nskip-db-structure-check" }
+
+ before do
+ allow(validator).to receive(:run).and_return(commit_message)
+ end
+
+ it 'skips schema validations' do
+ expect(validator.validate!).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/scripts/generate_rspec_pipeline_spec.rb b/spec/scripts/generate_rspec_pipeline_spec.rb
new file mode 100644
index 00000000000..b3eaf9e9127
--- /dev/null
+++ b/spec/scripts/generate_rspec_pipeline_spec.rb
@@ -0,0 +1,198 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'tempfile'
+
+require_relative '../../scripts/generate_rspec_pipeline'
+
+RSpec.describe GenerateRspecPipeline, :silence_stdout, feature_category: :tooling do
+ describe '#generate!' do
+ let!(:rspec_files) { Tempfile.new(['rspec_files_path', '.txt']) }
+ let(:rspec_files_content) do
+ "spec/migrations/a_spec.rb spec/migrations/b_spec.rb " \
+ "spec/lib/gitlab/background_migration/a_spec.rb spec/lib/gitlab/background_migration/b_spec.rb " \
+ "spec/models/a_spec.rb spec/models/b_spec.rb " \
+ "spec/controllers/a_spec.rb spec/controllers/b_spec.rb " \
+ "spec/features/a_spec.rb spec/features/b_spec.rb"
+ end
+
+ let(:pipeline_template) { Tempfile.new(['pipeline_template', '.yml.erb']) }
+ let(:pipeline_template_content) do
+ <<~YAML
+ <% if rspec_files_per_test_level[:migration][:files].size > 0 %>
+ rspec migration:
+ <% if rspec_files_per_test_level[:migration][:parallelization] > 1 %>
+ parallel: <%= rspec_files_per_test_level[:migration][:parallelization] %>
+ <% end %>
+ <% end %>
+ <% if rspec_files_per_test_level[:background_migration][:files].size > 0 %>
+ rspec background_migration:
+ <% if rspec_files_per_test_level[:background_migration][:parallelization] > 1 %>
+ parallel: <%= rspec_files_per_test_level[:background_migration][:parallelization] %>
+ <% end %>
+ <% end %>
+ <% if rspec_files_per_test_level[:unit][:files].size > 0 %>
+ rspec unit:
+ <% if rspec_files_per_test_level[:unit][:parallelization] > 1 %>
+ parallel: <%= rspec_files_per_test_level[:unit][:parallelization] %>
+ <% end %>
+ <% end %>
+ <% if rspec_files_per_test_level[:integration][:files].size > 0 %>
+ rspec integration:
+ <% if rspec_files_per_test_level[:integration][:parallelization] > 1 %>
+ parallel: <%= rspec_files_per_test_level[:integration][:parallelization] %>
+ <% end %>
+ <% end %>
+ <% if rspec_files_per_test_level[:system][:files].size > 0 %>
+ rspec system:
+ <% if rspec_files_per_test_level[:system][:parallelization] > 1 %>
+ parallel: <%= rspec_files_per_test_level[:system][:parallelization] %>
+ <% end %>
+ <% end %>
+ YAML
+ end
+
+ let(:knapsack_report) { Tempfile.new(['knapsack_report', '.json']) }
+ let(:knapsack_report_content) do
+ <<~JSON
+ {
+ "spec/migrations/a_spec.rb": 360.3,
+ "spec/migrations/b_spec.rb": 180.1,
+ "spec/lib/gitlab/background_migration/a_spec.rb": 60.5,
+ "spec/lib/gitlab/background_migration/b_spec.rb": 180.3,
+ "spec/models/a_spec.rb": 360.2,
+ "spec/models/b_spec.rb": 180.6,
+ "spec/controllers/a_spec.rb": 60.2,
+ "spec/controllers/ab_spec.rb": 180.4,
+ "spec/features/a_spec.rb": 360.1,
+ "spec/features/b_spec.rb": 180.5
+ }
+ JSON
+ end
+
+ around do |example|
+ rspec_files.write(rspec_files_content)
+ rspec_files.rewind
+ pipeline_template.write(pipeline_template_content)
+ pipeline_template.rewind
+ knapsack_report.write(knapsack_report_content)
+ knapsack_report.rewind
+ example.run
+ ensure
+ rspec_files.close
+ rspec_files.unlink
+ pipeline_template.close
+ pipeline_template.unlink
+ knapsack_report.close
+ knapsack_report.unlink
+ end
+
+ context 'when rspec_files and pipeline_template_path exists' do
+ subject do
+ described_class.new(
+ rspec_files_path: rspec_files.path,
+ pipeline_template_path: pipeline_template.path
+ )
+ end
+
+ it 'generates the pipeline config with default parallelization' do
+ subject.generate!
+
+ expect(File.read("#{pipeline_template.path}.yml"))
+ .to eq(
+ "rspec migration:\nrspec background_migration:\nrspec unit:\n" \
+ "rspec integration:\nrspec system:"
+ )
+ end
+
+ context 'when parallelization > 0' do
+ before do
+ stub_const("#{described_class}::DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS", 360)
+ end
+
+ it 'generates the pipeline config' do
+ subject.generate!
+
+ expect(File.read("#{pipeline_template.path}.yml"))
+ .to eq(
+ "rspec migration:\n parallel: 2\nrspec background_migration:\n parallel: 2\n" \
+ "rspec unit:\n parallel: 2\nrspec integration:\n parallel: 2\n" \
+ "rspec system:\n parallel: 2"
+ )
+ end
+ end
+
+ context 'when parallelization > MAX_NODES_COUNT' do
+ let(:rspec_files_content) do
+ Array.new(51) { |i| "spec/migrations/#{i}_spec.rb" }.join(' ')
+ end
+
+ before do
+ stub_const(
+ "#{described_class}::DEFAULT_AVERAGE_TEST_FILE_DURATION_IN_SECONDS",
+ described_class::OPTIMAL_TEST_JOB_DURATION_IN_SECONDS
+ )
+ end
+
+ it 'generates the pipeline config with max parallelization of 50' do
+ subject.generate!
+
+ expect(File.read("#{pipeline_template.path}.yml")).to eq("rspec migration:\n parallel: 50")
+ end
+ end
+ end
+
+ context 'when knapsack_report_path is given' do
+ subject do
+ described_class.new(
+ rspec_files_path: rspec_files.path,
+ pipeline_template_path: pipeline_template.path,
+ knapsack_report_path: knapsack_report.path
+ )
+ end
+
+ it 'generates the pipeline config with parallelization based on Knapsack' do
+ subject.generate!
+
+ expect(File.read("#{pipeline_template.path}.yml"))
+ .to eq(
+ "rspec migration:\n parallel: 2\nrspec background_migration:\n" \
+ "rspec unit:\n parallel: 2\nrspec integration:\n" \
+ "rspec system:\n parallel: 2"
+ )
+ end
+
+ context 'and Knapsack report does not contain valid JSON' do
+ let(:knapsack_report_content) { "#{super()}," }
+
+ it 'generates the pipeline config with default parallelization' do
+ subject.generate!
+
+ expect(File.read("#{pipeline_template.path}.yml"))
+ .to eq(
+ "rspec migration:\nrspec background_migration:\nrspec unit:\n" \
+ "rspec integration:\nrspec system:"
+ )
+ end
+ end
+ end
+
+ context 'when rspec_files does not exist' do
+ subject { described_class.new(rspec_files_path: nil, pipeline_template_path: pipeline_template.path) }
+
+ it 'generates the pipeline config using the no-op template' do
+ subject.generate!
+
+ expect(File.read("#{pipeline_template.path}.yml")).to include("no-op:")
+ end
+ end
+
+ context 'when pipeline_template_path does not exist' do
+ subject { described_class.new(rspec_files_path: rspec_files.path, pipeline_template_path: nil) }
+
+ it 'raises an ArgumentError' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+ end
+end
diff --git a/spec/scripts/lib/glfm/shared_spec.rb b/spec/scripts/lib/glfm/shared_spec.rb
index 3717c7ce18f..d407bd49d75 100644
--- a/spec/scripts/lib/glfm/shared_spec.rb
+++ b/spec/scripts/lib/glfm/shared_spec.rb
@@ -1,8 +1,9 @@
# frozen_string_literal: true
require 'fast_spec_helper'
+require 'tmpdir'
require_relative '../../../../scripts/lib/glfm/shared'
-RSpec.describe Glfm::Shared do
+RSpec.describe Glfm::Shared, feature_category: :team_planning do
let(:instance) do
Class.new do
include Glfm::Shared
diff --git a/spec/scripts/pipeline/create_test_failure_issues_spec.rb b/spec/scripts/pipeline/create_test_failure_issues_spec.rb
new file mode 100644
index 00000000000..fa27727542e
--- /dev/null
+++ b/spec/scripts/pipeline/create_test_failure_issues_spec.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+# rubocop:disable RSpec/VerifiedDoubles
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+require_relative '../../../scripts/pipeline/create_test_failure_issues'
+
+RSpec.describe CreateTestFailureIssues, feature_category: :tooling do
+ describe CreateTestFailureIssue do
+ let(:env) do
+ {
+ 'CI_JOB_URL' => 'ci_job_url',
+ 'CI_PIPELINE_URL' => 'ci_pipeline_url'
+ }
+ end
+
+ let(:project) { 'group/project' }
+ let(:api_token) { 'api_token' }
+ let(:creator) { described_class.new(project: project, api_token: api_token) }
+ let(:test_name) { 'The test description' }
+ let(:test_file) { 'spec/path/to/file_spec.rb' }
+ let(:test_file_content) do
+ <<~CONTENT
+ # comment
+
+ RSpec.describe Foo, feature_category: :source_code_management do
+ end
+
+ CONTENT
+ end
+
+ let(:test_file_stub) { double(read: test_file_content) }
+ let(:failed_test) do
+ {
+ 'name' => test_name,
+ 'file' => test_file,
+ 'job_url' => 'job_url'
+ }
+ end
+
+ let(:categories_mapping) do
+ {
+ 'source_code_management' => {
+ 'group' => 'source_code',
+ 'label' => 'Category:Source Code Management'
+ }
+ }
+ end
+
+ let(:groups_mapping) do
+ {
+ 'source_code' => {
+ 'label' => 'group::source_code'
+ }
+ }
+ end
+
+ before do
+ stub_env(env)
+ end
+
+ describe '#find' do
+ let(:expected_payload) do
+ {
+ state: 'opened',
+ search: "#{failed_test['file']} - ID: #{Digest::SHA256.hexdigest(failed_test['name'])[0...12]}"
+ }
+ end
+
+ let(:find_issue_stub) { double('FindIssues') }
+ let(:issue_stub) { double(title: expected_payload[:title], web_url: 'issue_web_url') }
+
+ before do
+ allow(creator).to receive(:puts)
+ end
+
+ it 'calls FindIssues#execute(payload)' do
+ expect(FindIssues).to receive(:new).with(project: project, api_token: api_token).and_return(find_issue_stub)
+ expect(find_issue_stub).to receive(:execute).with(expected_payload).and_return([issue_stub])
+
+ creator.find(failed_test)
+ end
+
+ context 'when no issues are found' do
+ it 'calls FindIssues#execute(payload)' do
+ expect(FindIssues).to receive(:new).with(project: project, api_token: api_token).and_return(find_issue_stub)
+ expect(find_issue_stub).to receive(:execute).with(expected_payload).and_return([])
+
+ creator.find(failed_test)
+ end
+ end
+ end
+
+ describe '#create' do
+ let(:expected_description) do
+ <<~DESCRIPTION
+ ### Full description
+
+ `#{failed_test['name']}`
+
+ ### File path
+
+ `#{failed_test['file']}`
+
+ <!-- Don't add anything after the report list since it's updated automatically -->
+ ### Reports
+
+ - #{failed_test['job_url']} (#{env['CI_PIPELINE_URL']})
+ DESCRIPTION
+ end
+
+ let(:expected_payload) do
+ {
+ title: "#{failed_test['file']} - ID: #{Digest::SHA256.hexdigest(failed_test['name'])[0...12]}",
+ description: expected_description,
+ labels: described_class::DEFAULT_LABELS.map { |label| "wip-#{label}" } + [
+ "wip-#{categories_mapping['source_code_management']['label']}", "wip-#{groups_mapping['source_code']['label']}" # rubocop:disable Layout/LineLength
+ ]
+ }
+ end
+
+ let(:create_issue_stub) { double('CreateIssue') }
+ let(:issue_stub) { double(title: expected_payload[:title], web_url: 'issue_web_url') }
+
+ before do
+ allow(creator).to receive(:puts)
+ allow(File).to receive(:open).and_call_original
+ allow(File).to receive(:open).with(File.expand_path(File.join('..', '..', '..', test_file), __dir__))
+ .and_return(test_file_stub)
+ allow(creator).to receive(:categories_mapping).and_return(categories_mapping)
+ allow(creator).to receive(:groups_mapping).and_return(groups_mapping)
+ end
+
+ it 'calls CreateIssue#execute(payload)' do
+ expect(CreateIssue).to receive(:new).with(project: project, api_token: api_token).and_return(create_issue_stub)
+ expect(create_issue_stub).to receive(:execute).with(expected_payload).and_return(issue_stub)
+
+ creator.create(failed_test) # rubocop:disable Rails/SaveBang
+ end
+ end
+ end
+end
+# rubocop:enable RSpec/VerifiedDoubles
diff --git a/spec/scripts/pipeline_test_report_builder_spec.rb b/spec/scripts/pipeline_test_report_builder_spec.rb
index e7529eb0d41..bee2a4a5835 100644
--- a/spec/scripts/pipeline_test_report_builder_spec.rb
+++ b/spec/scripts/pipeline_test_report_builder_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe PipelineTestReportBuilder, feature_category: :tooling do
let(:options) do
described_class::DEFAULT_OPTIONS.merge(
target_project: 'gitlab-org/gitlab',
+ current_pipeline_id: '42',
mr_id: '999',
instance_base_url: 'https://gitlab.com',
output_file_path: output_file_path
@@ -191,10 +192,14 @@ RSpec.describe PipelineTestReportBuilder, feature_category: :tooling do
context 'for latest pipeline' do
let(:failed_build_uri) { "#{latest_pipeline_url}/tests/suite.json?build_ids[]=#{failed_build_id}" }
+ let(:current_pipeline_uri) do
+ "#{options[:api_endpoint]}/projects/#{options[:target_project]}/pipelines/#{options[:current_pipeline_id]}"
+ end
subject { described_class.new(options.merge(pipeline_index: :latest)) }
it 'fetches builds from pipeline related to MR' do
+ expect(subject).to receive(:fetch).with(current_pipeline_uri).and_return(mr_pipelines[0])
expect(subject).to receive(:fetch).with(failed_build_uri).and_return(test_report_for_build)
subject.test_report_for_pipeline
diff --git a/spec/scripts/review_apps/automated_cleanup_spec.rb b/spec/scripts/review_apps/automated_cleanup_spec.rb
new file mode 100644
index 00000000000..546bf55a934
--- /dev/null
+++ b/spec/scripts/review_apps/automated_cleanup_spec.rb
@@ -0,0 +1,261 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'time'
+require_relative '../../../scripts/review_apps/automated_cleanup'
+
+RSpec.describe ReviewApps::AutomatedCleanup, feature_category: :tooling do
+ let(:instance) { described_class.new(options: options) }
+ let(:options) do
+ {
+ project_path: 'my-project-path',
+ gitlab_token: 'glpat-test-secret-token',
+ api_endpoint: 'gitlab.test/api/v4',
+ dry_run: dry_run
+ }
+ end
+
+ let(:kubernetes_client) { instance_double(Tooling::KubernetesClient) }
+ let(:helm_client) { instance_double(Tooling::Helm3Client) }
+ let(:gitlab_client) { double('GitLab') } # rubocop:disable RSpec/VerifiedDoubles
+ let(:dry_run) { false }
+ let(:now) { Time.now }
+ let(:one_day_ago) { (now - (1 * 24 * 3600)) }
+ let(:two_days_ago) { (now - (2 * 24 * 3600)) }
+ let(:three_days_ago) { (now - (3 * 24 * 3600)) }
+
+ before do
+ allow(instance).to receive(:gitlab).and_return(gitlab_client)
+ allow(Time).to receive(:now).and_return(now)
+ allow(Tooling::Helm3Client).to receive(:new).and_return(helm_client)
+ allow(Tooling::KubernetesClient).to receive(:new).and_return(kubernetes_client)
+
+ allow(kubernetes_client).to receive(:cleanup_by_created_at)
+ allow(kubernetes_client).to receive(:cleanup_by_release)
+ allow(kubernetes_client).to receive(:cleanup_review_app_namespaces)
+ allow(kubernetes_client).to receive(:delete_namespaces_by_exact_names)
+ end
+
+ shared_examples 'the days argument is an integer in the correct range' do
+ context 'when days is nil' do
+ let(:days) { nil }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error('days should be an integer between 1 and 365 inclusive! Got 0')
+ end
+ end
+
+ context 'when days is zero' do
+ let(:days) { 0 }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error('days should be an integer between 1 and 365 inclusive! Got 0')
+ end
+ end
+
+ context 'when days is above 365' do
+ let(:days) { 366 }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error('days should be an integer between 1 and 365 inclusive! Got 366')
+ end
+ end
+
+ context 'when days is a string' do
+ let(:days) { '10' }
+
+ it 'does not raise an error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when days is a float' do
+ let(:days) { 3.0 }
+
+ it 'does not raise an error' do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+
+ describe '#perform_stale_pvc_cleanup!' do
+ subject { instance.perform_stale_pvc_cleanup!(days: days) }
+
+ let(:days) { 2 }
+
+ it_behaves_like 'the days argument is an integer in the correct range'
+
+ it 'performs Kubernetes cleanup by created at' do
+ expect(kubernetes_client).to receive(:cleanup_by_created_at).with(
+ resource_type: 'pvc',
+ created_before: two_days_ago,
+ wait: false
+ )
+
+ subject
+ end
+
+ context 'when the dry-run flag is true' do
+ let(:dry_run) { true }
+
+ it 'does not delete anything' do
+ expect(kubernetes_client).not_to receive(:cleanup_by_created_at)
+ end
+ end
+ end
+
+ describe '#perform_stale_namespace_cleanup!' do
+ subject { instance.perform_stale_namespace_cleanup!(days: days) }
+
+ let(:days) { 2 }
+
+ it_behaves_like 'the days argument is an integer in the correct range'
+
+ it 'performs Kubernetes cleanup for review apps namespaces' do
+ expect(kubernetes_client).to receive(:cleanup_review_app_namespaces).with(
+ created_before: two_days_ago,
+ wait: false
+ )
+
+ subject
+ end
+
+ context 'when the dry-run flag is true' do
+ let(:dry_run) { true }
+
+ it 'does not delete anything' do
+ expect(kubernetes_client).not_to receive(:cleanup_review_app_namespaces)
+ end
+ end
+ end
+
+ describe '#perform_helm_releases_cleanup!' do
+ subject { instance.perform_helm_releases_cleanup!(days: days) }
+
+ let(:days) { 2 }
+ let(:helm_releases) { [] }
+
+ before do
+ allow(helm_client).to receive(:releases).and_return(helm_releases)
+
+ # Silence outputs to stdout
+ allow(instance).to receive(:puts)
+ end
+
+ shared_examples 'deletes the helm release' do
+ let(:releases_names) { helm_releases.map(&:name) }
+
+ before do
+ allow(helm_client).to receive(:delete)
+ allow(kubernetes_client).to receive(:cleanup_by_release)
+ allow(kubernetes_client).to receive(:delete_namespaces_by_exact_names)
+ end
+
+ it 'deletes the helm release' do
+ expect(helm_client).to receive(:delete).with(release_name: releases_names)
+
+ subject
+ end
+
+ it 'empties the k8s resources in the k8s namespace for the release' do
+ expect(kubernetes_client).to receive(:cleanup_by_release).with(release_name: releases_names, wait: false)
+
+ subject
+ end
+
+ it 'deletes the associated k8s namespace' do
+ expect(kubernetes_client).to receive(:delete_namespaces_by_exact_names).with(
+ resource_names: releases_names, wait: false
+ )
+
+ subject
+ end
+ end
+
+ shared_examples 'does not delete the helm release' do
+ it 'does not delete the helm release' do
+ expect(helm_client).not_to receive(:delete)
+
+ subject
+ end
+
+ it 'does not empty the k8s resources in the k8s namespace for the release' do
+ expect(kubernetes_client).not_to receive(:cleanup_by_release)
+
+ subject
+ end
+
+ it 'does not delete the associated k8s namespace' do
+ expect(kubernetes_client).not_to receive(:delete_namespaces_by_exact_names)
+
+ subject
+ end
+ end
+
+ shared_examples 'does nothing on a dry run' do
+ it_behaves_like 'does not delete the helm release'
+ end
+
+ it_behaves_like 'the days argument is an integer in the correct range'
+
+ context 'when the helm release is not a review-app release' do
+ let(:helm_releases) do
+ [
+ Tooling::Helm3Client::Release.new(
+ name: 'review-apps', namespace: 'review-apps', revision: 1, status: 'success', updated: three_days_ago.to_s
+ )
+ ]
+ end
+
+ it_behaves_like 'does not delete the helm release'
+ end
+
+ context 'when the helm release is a review-app release' do
+ let(:helm_releases) do
+ [
+ Tooling::Helm3Client::Release.new(
+ name: 'review-test', namespace: 'review-test', revision: 1, status: status, updated: updated_at
+ )
+ ]
+ end
+
+ context 'when the helm release was deployed recently enough' do
+ let(:updated_at) { one_day_ago.to_s }
+
+ context 'when the helm release is in failed state' do
+ let(:status) { 'failed' }
+
+ it_behaves_like 'deletes the helm release'
+
+ context 'when the dry-run flag is true' do
+ let(:dry_run) { true }
+
+ it_behaves_like 'does nothing on a dry run'
+ end
+ end
+
+ context 'when the helm release is not in failed state' do
+ let(:status) { 'success' }
+
+ it_behaves_like 'does not delete the helm release'
+ end
+ end
+
+ context 'when the helm release was deployed a while ago' do
+ let(:updated_at) { three_days_ago.to_s }
+
+ context 'when the helm release is in failed state' do
+ let(:status) { 'failed' }
+
+ it_behaves_like 'deletes the helm release'
+ end
+
+ context 'when the helm release is not in failed state' do
+ let(:status) { 'success' }
+
+ it_behaves_like 'deletes the helm release'
+ end
+ end
+ end
+ end
+end