Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
path: root/spec/lib
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2024-01-10 06:12:01 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2024-01-10 06:12:01 +0300
commitfea86fb8bf2339727de5e91ccf17ab105e993dca (patch)
tree25ddd67b8131643fa648f052eb29d527d72bdda3 /spec/lib
parentec4891efa777d951afdbff95557bbcf5fda00188 (diff)
Add latest changes from gitlab-org/gitlab@master
Diffstat (limited to 'spec/lib')
-rw-r--r--spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb90
-rw-r--r--spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb263
-rw-r--r--spec/lib/gitlab/security/scan_configuration_spec.rb47
3 files changed, 377 insertions, 23 deletions
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb
new file mode 100644
index 00000000000..103df128dac
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects/job_artifact_object_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFinalObjects::JobArtifactObject, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ let(:job_artifact_object) do
+ described_class.new(
+ fog_file,
+ bucket_prefix: bucket_prefix
+ )
+ end
+
+ # rubocop:disable RSpec/VerifiedDoubles -- For some reason it can't see Fog::AWS::Storage::File
+ let(:fog_file) { double(key: fog_file_key, content_length: 145) }
+ # rubocop:enable RSpec/VerifiedDoubles
+
+ let(:fog_file_key) { 'aaa/bbb/123' }
+ let(:bucket_prefix) { nil }
+
+ describe '#path' do
+ subject { job_artifact_object.path }
+
+ it { is_expected.to eq(fog_file.key) }
+ end
+
+ describe '#size' do
+ subject { job_artifact_object.size }
+
+ it { is_expected.to eq(fog_file.content_length) }
+ end
+
+ describe '#in_final_location?' do
+ subject { job_artifact_object.in_final_location? }
+
+ context 'when path has @final in it' do
+ let(:fog_file_key) { 'aaa/bbb/@final/123/ccc' }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when path has no @final in it' do
+ let(:fog_file_key) { 'aaa/bbb/ccc' }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#orphan?' do
+ shared_examples_for 'identifying orphan object' do
+ let(:artifact_final_path) { 'aaa/@final/bbb' }
+ let(:fog_file_key) { File.join([bucket_prefix, artifact_final_path].compact) }
+
+ subject { job_artifact_object.orphan? }
+
+ context 'when there is job artifact record with a file_final_path that matches the object path' do
+ before do
+      # We don't store the bucket_prefix, if any, in the file_final_path
+ create(:ci_job_artifact, file_final_path: artifact_final_path)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when there are no job artifact records with a file_final_path that matches the object path' do
+ context 'and there is a pending direct upload entry that matches the object path' do
+ before do
+          # We don't store the bucket_prefix, if any, in the pending direct upload entry
+ ObjectStorage::PendingDirectUpload.prepare(:artifacts, artifact_final_path)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'and there are no pending direct upload entries that match the object path' do
+ it { is_expected.to eq(true) }
+ end
+ end
+ end
+
+ context 'when bucket prefix is not present' do
+ it_behaves_like 'identifying orphan object'
+ end
+
+ context 'when bucket prefix is present' do
+ let(:bucket_prefix) { 'my/prefix' }
+
+ it_behaves_like 'identifying orphan object'
+ end
+ end
+end
diff --git a/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb
new file mode 100644
index 00000000000..aeb87bc0d9e
--- /dev/null
+++ b/spec/lib/gitlab/cleanup/orphan_job_artifact_final_objects_cleaner_spec.rb
@@ -0,0 +1,263 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Cleanup::OrphanJobArtifactFinalObjectsCleaner, :orphan_final_artifacts_cleanup, :clean_gitlab_redis_shared_state, feature_category: :build_artifacts do
+ describe '#run!' do
+ let(:cleaner) do
+ described_class.new(
+ provider: specified_provider,
+ force_restart: force_restart,
+ dry_run: dry_run
+ )
+ end
+
+ let(:dry_run) { true }
+ let(:force_restart) { false }
+ let(:remote_directory) { 'artifacts' }
+ let(:bucket_prefix) { nil }
+
+ subject(:run) { cleaner.run! }
+
+ before do
+ stub_const('Gitlab::Cleanup::OrphanJobArtifactFinalObjects::Paginators::BasePaginator::BATCH_SIZE', 2)
+
+ Rake.application.rake_require 'tasks/gitlab/cleanup'
+
+ Gitlab.config.artifacts.object_store.tap do |config|
+ config[:remote_directory] = remote_directory
+ config[:bucket_prefix] = bucket_prefix
+ end
+
+ allow(Gitlab::AppLogger).to receive(:info)
+ end
+
+ shared_examples_for 'cleaning up orphan final job artifact objects' do
+ let(:fog_connection) do
+ stub_object_storage_uploader(
+ config: Gitlab.config.artifacts.object_store,
+ uploader: JobArtifactUploader,
+ direct_upload: true
+ )
+ end
+
+ let!(:orphan_final_object_1) { create_fog_file }
+ let!(:orphan_final_object_2) { create_fog_file }
+ let!(:orphan_non_final_object) { create_fog_file(final: false) }
+
+ let!(:non_orphan_final_object_1) do
+ create_fog_file.tap do |file|
+ create(:ci_job_artifact, file_final_path: path_without_bucket_prefix(file.key))
+ end
+ end
+
+ let!(:non_orphan_final_object_2) do
+ create_fog_file.tap do |file|
+ create(:ci_job_artifact, file_final_path: path_without_bucket_prefix(file.key))
+ end
+ end
+
+ shared_context 'when resuming from marker' do
+ let(:dummy_error) { Class.new(StandardError) }
+
+ before do
+ fetch_counter = 0
+
+ allow(cleaner).to receive(:fetch_batch).and_wrap_original do |m, *args|
+ raise dummy_error if fetch_counter == 1
+
+ fetch_counter += 1
+ m.call(*args)
+ end
+ end
+ end
+
+ shared_examples_for 'handling dry run mode' do
+ context 'when on dry run (which is default)' do
+ it 'logs orphan objects to delete but does not delete them' do
+ run
+
+ expect_start_log_message
+ expect_first_page_loading_log_message
+ expect_page_loading_via_marker_log_message(times: 3)
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+ expect_no_delete_log_message(orphan_non_final_object)
+ expect_no_delete_log_message(non_orphan_final_object_1)
+ expect_no_delete_log_message(non_orphan_final_object_2)
+ expect_done_log_message
+
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+ expect_object_to_exist(orphan_non_final_object)
+ expect_object_to_exist(non_orphan_final_object_1)
+ expect_object_to_exist(non_orphan_final_object_2)
+ end
+
+ context 'when interrupted in the middle of processing pages' do
+ include_context 'when resuming from marker'
+
+ it 'resumes from last known page marker on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+ saved_marker = fetch_saved_marker
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: false,
+ dry_run: true
+ )
+
+ new_cleaner.run!
+
+ expect_resuming_from_marker_log_message(saved_marker)
+
+ # Given we can't guarantee the order of the objects because
+ # of random path generation, we can't tell which page they will
+ # fall in, so we will just ensure that they
+ # were all logged in the end.
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+
+ # Ensure that they were not deleted because this is just dry run.
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+ end
+
+ context 'and force_restart is true' do
+ it 'starts from the first page on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: true,
+ dry_run: true
+ )
+
+ new_cleaner.run!
+
+ expect_no_resuming_from_marker_log_message
+
+ # Ensure that they were not deleted because this is just dry run.
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+ end
+ end
+ end
+ end
+
+ context 'when dry run is set to false' do
+ let(:dry_run) { false }
+
+ it 'logs orphan objects to delete and deletes them' do
+ expect_object_to_exist(orphan_final_object_1)
+ expect_object_to_exist(orphan_final_object_2)
+
+ run
+
+ expect_start_log_message
+ expect_first_page_loading_log_message
+ expect_page_loading_via_marker_log_message(times: 3)
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+ expect_no_delete_log_message(orphan_non_final_object)
+ expect_no_delete_log_message(non_orphan_final_object_1)
+ expect_no_delete_log_message(non_orphan_final_object_2)
+ expect_done_log_message
+
+ expect_object_to_be_deleted(orphan_final_object_1)
+ expect_object_to_be_deleted(orphan_final_object_2)
+ expect_object_to_exist(orphan_non_final_object)
+ expect_object_to_exist(non_orphan_final_object_1)
+ expect_object_to_exist(non_orphan_final_object_2)
+ end
+
+ context 'when interrupted in the middle of processing pages' do
+ include_context 'when resuming from marker'
+
+ it 'resumes from last known page marker on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+ saved_marker = fetch_saved_marker
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: false,
+ dry_run: false
+ )
+
+ new_cleaner.run!
+
+ expect_resuming_from_marker_log_message(saved_marker)
+
+ # Given we can't guarantee the order of the objects because
+ # of random path generation, we can't tell which page they will
+ # fall in, so we will just ensure that they
+ # were all logged in the end.
+ expect_delete_log_message(orphan_final_object_1)
+ expect_delete_log_message(orphan_final_object_2)
+
+ # Ensure that they were deleted because this is not dry run.
+ expect_object_to_be_deleted(orphan_final_object_1)
+ expect_object_to_be_deleted(orphan_final_object_2)
+ end
+
+ context 'and force_restart is true' do
+ it 'starts from the first page on the next run' do
+ expect { cleaner.run! }.to raise_error(dummy_error)
+
+ new_cleaner = described_class.new(
+ provider: specified_provider,
+ force_restart: true,
+ dry_run: false
+ )
+
+ new_cleaner.run!
+
+ expect_no_resuming_from_marker_log_message
+
+ # Ensure that they were deleted because this is not a dry run.
+ expect_object_to_be_deleted(orphan_final_object_1)
+ expect_object_to_be_deleted(orphan_final_object_2)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when not configured to use bucket_prefix' do
+ let(:remote_directory) { 'artifacts' }
+ let(:bucket_prefix) { nil }
+
+ it_behaves_like 'handling dry run mode'
+ end
+
+ context 'when configured to use bucket_prefix' do
+ let(:remote_directory) { 'main-bucket' }
+ let(:bucket_prefix) { 'my/artifacts' }
+
+ it_behaves_like 'handling dry run mode'
+ end
+ end
+
+ context 'when defaulting to provider in the object store configuration' do
+ let(:specified_provider) { nil }
+
+ it_behaves_like 'cleaning up orphan final job artifact objects'
+ end
+
+ context 'when provider is specified' do
+ context 'and provider is supported' do
+ let(:specified_provider) { 'aws' }
+
+ it_behaves_like 'cleaning up orphan final job artifact objects'
+ end
+
+ context 'and provider is not supported' do
+ let(:specified_provider) { 'somethingelse' }
+
+ it 'raises an error' do
+ expect { run }.to raise_error(described_class::UnsupportedProviderError)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/security/scan_configuration_spec.rb b/spec/lib/gitlab/security/scan_configuration_spec.rb
index 706f6664a41..491be85584b 100644
--- a/spec/lib/gitlab/security/scan_configuration_spec.rb
+++ b/spec/lib/gitlab/security/scan_configuration_spec.rb
@@ -97,13 +97,13 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
short_name: "SAST",
description: "Analyze your source code for known vulnerabilities.",
help_path: "/help/user/application_security/sast/index",
- config_help_path: "/help/user/application_security/sast/index#configuration",
+ configuration_help_path: "/help/user/application_security/sast/index#configuration",
type: "sast" }
:sast_iac | { name: "Infrastructure as Code (IaC) Scanning",
- short_name: "ciReport|SAST IaC",
+ short_name: "SAST IaC",
description: "Analyze your infrastructure as code configuration files for known vulnerabilities.",
help_path: "/help/user/application_security/iac_scanning/index",
- config_help_path: "/help/user/application_security/iac_scanning/index#configuration",
+ configuration_help_path: "/help/user/application_security/iac_scanning/index#configuration",
type: "sast_iac" }
:dast | {
badge: { text: "Available on demand",
@@ -113,34 +113,34 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
secondary: {
type: "dast_profiles",
name: "DAST profiles",
- description: "SecurityConfiguration|Manage profiles for use by DAST scans.",
- configuration_text: "SecurityConfiguration|Manage profiles"
+ description: "Manage profiles for use by DAST scans.",
+ configuration_text: "Manage profiles"
},
name: "Dynamic Application Security Testing (DAST)",
- short_name: "ciReport|DAST",
- description: "ciReport|Analyze a deployed version of your web application for known " \
+ short_name: "DAST",
+ description: "Analyze a deployed version of your web application for known " \
"vulnerabilities by examining it from the outside in. DAST works by simulating " \
"external attacks on your application while it is running.",
help_path: "/help/user/application_security/dast/index",
- config_help_path: "/help/user/application_security/dast/index#enable-automatic-dast-run",
+ configuration_help_path: "/help/user/application_security/dast/index#enable-automatic-dast-run",
type: "dast",
anchor: "dast"
}
:dependency_scanning | { name: "Dependency Scanning",
description: "Analyze your dependencies for known vulnerabilities.",
help_path: "/help/user/application_security/dependency_scanning/index",
- config_help_path: "/help/user/application_security/dependency_scanning/index#configuration",
+ configuration_help_path: "/help/user/application_security/dependency_scanning/index#configuration",
type: "dependency_scanning",
anchor: "dependency-scanning" }
:container_scanning | { name: "Container Scanning",
description: "Check your Docker images for known vulnerabilities.",
help_path: "/help/user/application_security/container_scanning/index",
- config_help_path: "/help/user/application_security/container_scanning/index#configuration",
+ configuration_help_path: "/help/user/application_security/container_scanning/index#configuration",
type: "container_scanning" }
:secret_detection | { name: "Secret Detection",
description: "Analyze your source code and git history for secrets.",
help_path: "/help/user/application_security/secret_detection/index",
- config_help_path: "/help/user/application_security/secret_detection/index#configuration",
+ configuration_help_path: "/help/user/application_security/secret_detection/index#configuration",
type: "secret_detection" }
:api_fuzzing | { name: "API Fuzzing",
description: "Find bugs in your code with API fuzzing.",
@@ -149,32 +149,33 @@ RSpec.describe ::Gitlab::Security::ScanConfiguration do
:coverage_fuzzing | { name: "Coverage Fuzzing",
description: "Find bugs in your code with coverage-guided fuzzing.",
help_path: "/help/user/application_security/coverage_fuzzing/index",
- config_help_path: "/help/user/application_security/coverage_fuzzing/index#enable-coverage-guided-fuzz-testing",
+ configuration_help_path: \
+ "/help/user/application_security/coverage_fuzzing/index#enable-coverage-guided-fuzz-testing",
type: "coverage_fuzzing",
secondary: { type: "corpus_management",
name: "Corpus Management",
- description: "SecurityConfiguration|Manage corpus files used as " \
+ description: "Manage corpus files used as " \
"seed inputs with coverage-guided fuzzing.",
- configuration_text: "SecurityConfiguration|Manage corpus" } }
+ configuration_text: "Manage corpus" } }
:breach_and_attack_simulation | { anchor: "bas",
badge: { always_display: true,
- text: "SecurityConfiguration|Incubating feature",
- tooltip_text: "SecurityConfiguration|Breach and Attack Simulation is an incubating feature " \
+ text: "Incubating feature",
+ tooltip_text: "Breach and Attack Simulation is an incubating feature " \
"extending existing security " \
"testing by simulating adversary activity.",
variant: "info" },
- description: "SecurityConfiguration|Simulate breach and attack scenarios against your running " \
+ description: "Simulate breach and attack scenarios against your running " \
"application by attempting to detect " \
"and exploit known vulnerabilities.",
- name: "SecurityConfiguration|Breach and Attack Simulation (BAS)",
+ name: "Breach and Attack Simulation (BAS)",
help_path: "/help/user/application_security/breach_and_attack_simulation/index",
- secondary: { config_help_path: "/help/user/application_security/breach_and_attack_simulation/" \
- "index#extend-dynamic-application-security-testing-dast",
- description: "SecurityConfiguration|Enable incubating Breach and " \
+ secondary: { configuration_help_path: "/help/user/application_security/breach_and_attack_simulation/" \
+ "index#extend-dynamic-application-security-testing-dast",
+ description: "Enable incubating Breach and " \
"Attack Simulation focused features " \
"such as callback attacks in your DAST scans.",
- name: "SecurityConfiguration|Out-of-Band Application Security Testing (OAST)" },
- short_name: "SecurityConfiguration|BAS",
+ name: "Out-of-Band Application Security Testing (OAST)" },
+ short_name: "BAS",
type: "breach_and_attack_simulation" }
:invalid | {}
end