gitlab.com/gitlab-org/gitlab-foss.git

author     GitLab Bot <gitlab-bot@gitlab.com>  2021-11-18 16:16:36 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2021-11-18 16:16:36 +0300
commit     311b0269b4eb9839fa63f80c8d7a58f32b8138a0 (patch)
tree       07e7870bca8aed6d61fdcc810731c50d2c40af47 /scripts
parent     27909cef6c4170ed9205afa7426b8d3de47cbb0c (diff)
Add latest changes from gitlab-org/gitlab@14-5-stable-ee (tag: v14.5.0-rc42)
Diffstat (limited to 'scripts')
-rw-r--r--  scripts/api/default_options.rb              7
-rwxr-xr-x  scripts/changed-feature-flags              59
-rwxr-xr-x  scripts/docs_screenshots.rb                 2
-rwxr-xr-x  scripts/failed_tests.rb                   122
-rwxr-xr-x  scripts/lint-doc.sh                         2
-rwxr-xr-x  scripts/no-dir-check                        9
-rwxr-xr-x  scripts/no-ee-check                         9
-rwxr-xr-x  scripts/pipeline_test_report_builder.rb   153
-rwxr-xr-x  scripts/regenerate-schema                   4
-rwxr-xr-x  scripts/review_apps/review-apps.sh          7
-rw-r--r--  scripts/rspec_helpers.sh                   42
-rwxr-xr-x  scripts/schema_changed.sh                  40
-rwxr-xr-x  scripts/security-harness                    3
-rwxr-xr-x  scripts/static-analysis                     6
-rwxr-xr-x  scripts/trigger-build                       3
-rwxr-xr-x  scripts/used-feature-flags                 16
-rw-r--r--  scripts/utils.sh                            2
-rwxr-xr-x  scripts/verify-tff-mapping                  4
18 files changed, 441 insertions, 49 deletions
diff --git a/scripts/api/default_options.rb b/scripts/api/default_options.rb
index 70fb9683733..d10666e3a68 100644
--- a/scripts/api/default_options.rb
+++ b/scripts/api/default_options.rb
@@ -9,3 +9,10 @@ module API
endpoint: ENV['CI_API_V4_URL'] || 'https://gitlab.com/api/v4'
}.freeze
end
+
+module Host
+ DEFAULT_OPTIONS = {
+ instance_base_url: ENV['CI_SERVER_URL'],
+ mr_id: ENV['CI_MERGE_REQUEST_ID']
+ }.freeze
+end
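
The new Host module simply mirrors two CI environment variables into a frozen hash. A quick way to inspect it locally (a sketch; the exported values are illustrative, not real CI values):

    export CI_SERVER_URL="https://gitlab.com"
    export CI_MERGE_REQUEST_ID="123456"
    ruby -r ./scripts/api/default_options -e 'puts Host::DEFAULT_OPTIONS.inspect'
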
diff --git a/scripts/changed-feature-flags b/scripts/changed-feature-flags
new file mode 100755
index 00000000000..3a4f18bd78f
--- /dev/null
+++ b/scripts/changed-feature-flags
@@ -0,0 +1,59 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'yaml'
+require 'optparse'
+require_relative 'api/default_options'
+
+# This script returns the desired feature flag state as a comma-separated string for the feature flags in the specified files.
+# Each desired feature flag state is specified as 'feature-flag=state'.
+#
+# For example, if the specified files included `config/feature_flags/development/ci_yaml_limit_size.yml` and the desired
+# state as specified by the second argument was enabled, the value returned would be `ci_yaml_limit_size=enabled`
+
+class GetFeatureFlagsFromFiles
+ def initialize(options)
+ @files = options.delete(:files)
+ @state = options.delete(:state)
+ end
+
+ def extracted_flags
+ files.each_with_object([]) do |file_path, all|
+ next unless file_path =~ %r{/feature_flags/(development|ops)/.*\.yml}
+ next unless File.exist?(file_path)
+
+ ff_yaml = YAML.safe_load(File.read(file_path))
+ ff_to_add = "#{ff_yaml['name']}"
+ ff_to_add += "=#{state}" unless state.to_s.empty?
+
+ all << ff_to_add
+ end.join(',')
+ end
+
+ private
+
+ attr_reader :files, :state
+end
+
+if $0 == __FILE__
+ options = API::DEFAULT_OPTIONS.dup
+
+ OptionParser.new do |opts|
+ opts.on("-f", "--files FILES", Array, "Comma-separated list of feature flag config files") do |value|
+ options[:files] = value
+ end
+
+ opts.on("-s", "--state STATE", String,
+ "The desired state of the feature flags (enabled or disabled). If not specified the output will only list the feature flags."
+ ) do |value|
+ options[:state] = value
+ end
+
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+ end.parse!
+
+ puts GetFeatureFlagsFromFiles.new(options).extracted_flags
+end
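
A minimal usage sketch for the new script, reusing the flag file from its header comment (the expected output is inferred from the code, not captured from a real run):

    scripts/changed-feature-flags \
      --files config/feature_flags/development/ci_yaml_limit_size.yml \
      --state enabled
    # expected output: ci_yaml_limit_size=enabled
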
diff --git a/scripts/docs_screenshots.rb b/scripts/docs_screenshots.rb
index 094e7e87960..a734540eb69 100755
--- a/scripts/docs_screenshots.rb
+++ b/scripts/docs_screenshots.rb
@@ -5,7 +5,7 @@
require 'png_quantizator'
require 'open3'
require 'parallel'
-require_relative '../tooling/lib/tooling/images'
+require_relative '../tooling/lib/tooling/image'
generator = ARGV[0]
milestone = ARGV[1]
diff --git a/scripts/failed_tests.rb b/scripts/failed_tests.rb
new file mode 100755
index 00000000000..fb13df7bf62
--- /dev/null
+++ b/scripts/failed_tests.rb
@@ -0,0 +1,122 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'optparse'
+require 'fileutils'
+require 'uri'
+require 'json'
+require 'set'
+
+class FailedTests
+ def initialize(options)
+ @filename = options.delete(:previous_tests_report_path)
+ @output_directory = options.delete(:output_directory)
+ @rspec_pg_regex = options.delete(:rspec_pg_regex)
+ @rspec_ee_pg_regex = options.delete(:rspec_ee_pg_regex)
+ end
+
+ def output_failed_test_files
+ create_output_dir
+
+ failed_files_for_suite_collection.each do |suite_collection_name, suite_collection_files|
+ failed_test_files = suite_collection_files.map { |filepath| filepath.delete_prefix('./') }.join(' ')
+
+ output_file = File.join(output_directory, "#{suite_collection_name}_failed_files.txt")
+
+ File.open(output_file, 'w') do |file|
+ file.write(failed_test_files)
+ end
+ end
+ end
+
+ def failed_files_for_suite_collection
+ suite_map.each_with_object(Hash.new { |h, k| h[k] = Set.new }) do |(suite_collection_name, suite_collection_regex), hash|
+ failed_suites.each do |suite|
+ hash[suite_collection_name].merge(failed_files(suite)) if suite['name'] =~ suite_collection_regex
+ end
+ end
+ end
+
+ def suite_map
+ @suite_map ||= {
+ rspec: rspec_pg_regex,
+ rspec_ee: rspec_ee_pg_regex,
+ jest: /jest/
+ }
+ end
+
+ private
+
+ attr_reader :filename, :output_directory, :rspec_pg_regex, :rspec_ee_pg_regex
+
+ def file_contents
+ @file_contents ||= begin
+ File.read(filename)
+ rescue Errno::ENOENT
+ '{}'
+ end
+ end
+
+ def file_contents_as_json
+ @file_contents_as_json ||= begin
+ JSON.parse(file_contents)
+ rescue JSON::ParserError
+ {}
+ end
+ end
+
+ def failed_suites
+ return [] unless file_contents_as_json['suites']
+
+ file_contents_as_json['suites'].select { |suite| suite['failed_count'] > 0 }
+ end
+
+ def failed_files(suite)
+ return [] unless suite
+
+ suite['test_cases'].each_with_object([]) do |failure_hash, failed_cases|
+ failed_cases << failure_hash['file'] if failure_hash['status'] == 'failed'
+ end
+ end
+
+ def create_output_dir
+ return if File.directory?(output_directory)
+
+ puts 'Creating output directory...'
+ FileUtils.mkdir_p(output_directory)
+ end
+end
+
+if $0 == __FILE__
+ options = {
+ previous_tests_report_path: 'test_results/previous/test_reports.json',
+ output_directory: 'tmp/previous_failed_tests/',
+ rspec_pg_regex: /rspec .+ pg12( .+)?/,
+ rspec_ee_pg_regex: /rspec-ee .+ pg12( .+)?/
+ }
+
+ OptionParser.new do |opts|
+ opts.on("-p", "--previous-tests-report-path PREVIOUS_TESTS_REPORT_PATH", String, "Path of the file listing previous test failures") do |value|
+ options[:previous_tests_report_path] = value
+ end
+
+ opts.on("-o", "--output-directory OUTPUT_DIRECTORY", String, "Output directory for failed test files") do |value|
+ options[:output_directory] = value
+ end
+
+ opts.on("--rspec-pg-regex RSPEC_PG_REGEX", Regexp, "Regex to use when finding matching RSpec jobs") do |value|
+ options[:rspec_pg_regex] = value
+ end
+
+ opts.on("--rspec-ee-pg-regex RSPEC_EE_PG_REGEX", Regexp, "Regex to use when finding matching RSpec EE jobs") do |value|
+ options[:rspec_ee_pg_regex] = value
+ end
+
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+ end.parse!
+
+ FailedTests.new(options).output_failed_test_files
+end
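
A usage sketch for failed_tests.rb that relies entirely on its built-in defaults; the only assumption is that a previous pipeline report has already been downloaded to the default path:

    # Reads test_results/previous/test_reports.json and, for each suite collection
    # that has failures (rspec, rspec_ee, jest), writes <collection>_failed_files.txt
    # into tmp/previous_failed_tests/.
    scripts/failed_tests.rb
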
diff --git a/scripts/lint-doc.sh b/scripts/lint-doc.sh
index e99b8a47301..1698d724fd2 100755
--- a/scripts/lint-doc.sh
+++ b/scripts/lint-doc.sh
@@ -128,7 +128,7 @@ function run_locally_or_in_docker() {
$cmd $args
elif hash docker 2>/dev/null
then
- docker run -t -v ${PWD}:/gitlab -w /gitlab --rm registry.gitlab.com/gitlab-org/gitlab-docs/lint-markdown:alpine-3.13-vale-2.10.2-markdownlint-0.26.0 ${cmd} ${args}
+ docker run -t -v ${PWD}:/gitlab -w /gitlab --rm registry.gitlab.com/gitlab-org/gitlab-docs/lint-markdown:alpine-3.14-vale-2.12.0-markdownlint-0.29.0 ${cmd} ${args}
else
echo
echo " ✖ ERROR: '${cmd}' not found. Install '${cmd}' or Docker to proceed." >&2
diff --git a/scripts/no-dir-check b/scripts/no-dir-check
new file mode 100755
index 00000000000..bbb303da9b2
--- /dev/null
+++ b/scripts/no-dir-check
@@ -0,0 +1,9 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+dir_name = ARGV.first || abort('ERROR: Please specify a directory.')
+dir_path = File.expand_path(dir_name, "#{__dir__}/..")
+
+if Dir.exist?(dir_path)
+ abort("ERROR: This repository contains #{dir_name}/ directory. #{dir_name.upcase} changes should go to the corresponding repository.")
+end
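
The script takes the directory name to check as its only argument; a hedged example of how it could be invoked in this FOSS repository, presumably replacing the no-ee-check script deleted below:

    # Fails if an ee/ directory is present in the checkout.
    scripts/no-dir-check ee
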
diff --git a/scripts/no-ee-check b/scripts/no-ee-check
deleted file mode 100755
index a878a4424e9..00000000000
--- a/scripts/no-ee-check
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-ee_path = File.join(File.expand_path(__dir__), '../ee')
-
-if Dir.exist?(ee_path)
- puts 'The repository contains /ee directory. There should be no /ee directory in CE repo.'
- exit 1
-end
diff --git a/scripts/pipeline_test_report_builder.rb b/scripts/pipeline_test_report_builder.rb
new file mode 100755
index 00000000000..2101decf59a
--- /dev/null
+++ b/scripts/pipeline_test_report_builder.rb
@@ -0,0 +1,153 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require 'optparse'
+require 'time'
+require 'fileutils'
+require 'uri'
+require 'cgi'
+require 'net/http'
+require 'json'
+require_relative 'api/default_options'
+
+# Request list of pipelines for MR
+# https://gitlab.com/api/v4/projects/gitlab-org%2Fgitlab/merge_requests/69053/pipelines
+# Find latest failed pipeline
+# Retrieve list of failed builds for test stage in pipeline
+# https://gitlab.com/api/v4/projects/gitlab-org%2Fgitlab/pipelines/363788864/jobs/?scope=failed
+# Retrieve test reports for these builds
+# https://gitlab.com/gitlab-org/gitlab/-/pipelines/363788864/tests/suite.json?build_ids[]=1555608749
+# Push into expected format for failed tests
+class PipelineTestReportBuilder
+ def initialize(options)
+ @target_project = options.delete(:target_project)
+ @mr_id = options.delete(:mr_id) || Host::DEFAULT_OPTIONS[:mr_id]
+ @instance_base_url = options.delete(:instance_base_url) || Host::DEFAULT_OPTIONS[:instance_base_url]
+ @output_file_path = options.delete(:output_file_path)
+ end
+
+ def test_report_for_latest_pipeline
+ build_test_report_json_for_pipeline(previous_pipeline)
+ end
+
+ def execute
+ if output_file_path
+ FileUtils.mkdir_p(File.dirname(output_file_path))
+ end
+
+ File.open(output_file_path, 'w') do |file|
+ file.write(test_report_for_latest_pipeline)
+ end
+ end
+
+ def previous_pipeline
+ # Top of the list will always be the current pipeline
+ # Second from top will be the previous pipeline
+ pipelines_for_mr.sort_by { |a| -Time.parse(a['created_at']).to_i }[1]
+ end
+
+ private
+
+ attr_reader :target_project, :mr_id, :instance_base_url, :output_file_path
+
+ def pipeline_project_api_base_url(pipeline)
+ "#{instance_base_url}/api/v4/projects/#{pipeline['project_id']}"
+ end
+
+ def target_project_api_base_url
+ "#{instance_base_url}/api/v4/projects/#{CGI.escape(target_project)}"
+ end
+
+ def pipelines_for_mr
+ fetch("#{target_project_api_base_url}/merge_requests/#{mr_id}/pipelines")
+ end
+
+ def failed_builds_for_pipeline(pipeline)
+ fetch("#{pipeline_project_api_base_url(pipeline)}/pipelines/#{pipeline['id']}/jobs?scope=failed&per_page=100")
+ end
+
+ # This method uses the test suite endpoint to gather test results for a particular build.
+ # Here we request individual builds, even though it is possible to supply multiple build IDs.
+ # The reason for this: it is possible to lose the job context and name when requesting multiple builds.
+ # For more info, please see: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/69053#note_709939709
+ def test_report_for_build(pipeline, build_id)
+ fetch("#{pipeline['web_url']}/tests/suite.json?build_ids[]=#{build_id}")
+ end
+
+ def build_test_report_json_for_pipeline(pipeline)
+ # empty file if no previous failed pipeline
+ return {}.to_json if pipeline.nil? || pipeline['status'] != 'failed'
+
+ test_report = {}
+
+ puts "Discovered last failed pipeline (#{pipeline['id']}) for MR!#{mr_id}"
+
+ failed_builds_for_test_stage = failed_builds_for_pipeline(pipeline).select do |failed_build|
+ failed_build['stage'] == 'test'
+ end
+
+ puts "#{failed_builds_for_test_stage.length} failed builds in test stage found..."
+
+ if failed_builds_for_test_stage.any?
+ test_report['suites'] ||= []
+
+ failed_builds_for_test_stage.each do |failed_build|
+ test_report['suites'] << test_report_for_build(pipeline, failed_build['id'])
+ end
+ end
+
+ test_report.to_json
+ end
+
+ def fetch(uri_str)
+ uri = URI(uri_str)
+
+ puts "URL: #{uri}"
+
+ request = Net::HTTP::Get.new(uri)
+
+ body = ''
+
+ Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
+ http.request(request) do |response|
+ case response
+ when Net::HTTPSuccess
+ body = response.read_body
+ else
+ raise "Unexpected response: #{response.value}"
+ end
+ end
+ end
+
+ JSON.parse(body)
+ end
+end
+
+if $0 == __FILE__
+ options = Host::DEFAULT_OPTIONS.dup
+
+ OptionParser.new do |opts|
+ opts.on("-t", "--target-project TARGET_PROJECT", String, "Project where to find the merge request") do |value|
+ options[:target_project] = value
+ end
+
+ opts.on("-m", "--mr-id MR_ID", String, "A merge request ID") do |value|
+ options[:mr_id] = value
+ end
+
+ opts.on("-i", "--instance-base-url INSTANCE_BASE_URL", String, "URL of the instance where project and merge request resides") do |value|
+ options[:instance_base_url] = value
+ end
+
+ opts.on("-o", "--output-file-path OUTPUT_PATH", String, "A path for output file") do |value|
+ options[:output_file_path] = value
+ end
+
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+ end.parse!
+
+ PipelineTestReportBuilder.new(options).execute
+end
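
A hedged invocation sketch for the builder; the project path and MR ID are placeholders taken from the script's own comments, and the real CI invocation lives in retrieve_previous_failed_tests in scripts/rspec_helpers.sh below:

    scripts/pipeline_test_report_builder.rb \
      --instance-base-url "https://gitlab.com" \
      --target-project "gitlab-org/gitlab" \
      --mr-id "69053" \
      --output-file-path "test_results/previous/test_reports.json"
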
diff --git a/scripts/regenerate-schema b/scripts/regenerate-schema
index 485bb2d5505..06230942dcd 100755
--- a/scripts/regenerate-schema
+++ b/scripts/regenerate-schema
@@ -126,7 +126,7 @@ class SchemaRegenerator
# In order to properly reset the database and re-run migrations
# the schema migrations for new migrations must be removed.
def remove_schema_migration_files
- (untracked_schema_migrations + commited_schema_migrations).each do |schema_migration|
+ (untracked_schema_migrations + committed_schema_migrations).each do |schema_migration|
FileUtils.rm(schema_migration)
end
end
@@ -144,7 +144,7 @@ class SchemaRegenerator
# List of untracked schema migrations
#
# Get a list of schema migrations that have been committed since the last
- def commited_schema_migrations
+ def committed_schema_migrations
git_command = "git diff --name-only --diff-filter=A #{merge_base} -- #{SCHEMA_MIGRATIONS_DIR}"
run(git_command).chomp.split("\n")
end
diff --git a/scripts/review_apps/review-apps.sh b/scripts/review_apps/review-apps.sh
index 8ec26e7ba89..edb55a83555 100755
--- a/scripts/review_apps/review-apps.sh
+++ b/scripts/review_apps/review-apps.sh
@@ -10,7 +10,12 @@ function deploy_exists() {
helm status --namespace "${namespace}" "${release}" >/dev/null 2>&1
deploy_exists=$?
- echoinfo "Deployment status for ${release} is ${deploy_exists}"
+ if [ $deploy_exists -eq 0 ]; then
+ echoinfo "Previous deployment for ${release} found."
+ else
+ echoerr "Previous deployment for ${release} NOT found."
+ fi
+
return $deploy_exists
}
diff --git a/scripts/rspec_helpers.sh b/scripts/rspec_helpers.sh
index accc52a7ece..cabd2e6380c 100644
--- a/scripts/rspec_helpers.sh
+++ b/scripts/rspec_helpers.sh
@@ -26,6 +26,8 @@ function retrieve_tests_metadata() {
fi
if [[ ! -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then
+ # Fixed ID to get the report back to a good state after https://gitlab.com/gitlab-org/gitlab/-/issues/345798 / https://gitlab.com/gitlab-org/gitlab/-/merge_requests/74617
+ test_metadata_job_id=1766932099
scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "${FLAKY_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
fi
fi
@@ -89,6 +91,25 @@ function crystalball_rspec_data_exists() {
compgen -G "crystalball/rspec*.yml" >/dev/null
}
+function retrieve_previous_failed_tests() {
+ local directory_for_output_reports="${1}"
+ local rspec_pg_regex="${2}"
+ local rspec_ee_pg_regex="${3}"
+ local pipeline_report_path="test_results/previous/test_reports.json"
+
+ # Used to query merge requests. This variable reflects where the merge request has been created
+ local target_project_path="${CI_MERGE_REQUEST_PROJECT_PATH}"
+ local instance_url="${CI_SERVER_URL}"
+
+ echo 'Attempting to build pipeline test report...'
+
+ scripts/pipeline_test_report_builder.rb --instance-base-url "${instance_url}" --target-project "${target_project_path}" --mr-id "${CI_MERGE_REQUEST_IID}" --output-file-path "${pipeline_report_path}"
+
+ echo 'Generating failed tests lists...'
+
+ scripts/failed_tests.rb --previous-tests-report-path "${pipeline_report_path}" --output-directory "${directory_for_output_reports}" --rspec-pg-regex "${rspec_pg_regex}" --rspec-ee-pg-regex "${rspec_ee_pg_regex}"
+}
+
function rspec_simple_job() {
local rspec_opts="${1}"
@@ -140,6 +161,7 @@ function rspec_paralellized_job() {
fi
echo "KNAPSACK_TEST_FILE_PATTERN: ${KNAPSACK_TEST_FILE_PATTERN}"
+ echo "SKIP_FLAKY_TESTS_AUTOMATICALLY: ${SKIP_FLAKY_TESTS_AUTOMATICALLY}"
if [[ -d "ee/" ]]; then
export KNAPSACK_GENERATE_REPORT="true"
@@ -147,6 +169,7 @@ function rspec_paralellized_job() {
export SUITE_FLAKY_RSPEC_REPORT_PATH="${FLAKY_RSPEC_SUITE_REPORT_PATH}"
export FLAKY_RSPEC_REPORT_PATH="rspec_flaky/all_${report_name}_report.json"
export NEW_FLAKY_RSPEC_REPORT_PATH="rspec_flaky/new_${report_name}_report.json"
+ export SKIPPED_FLAKY_TESTS_REPORT_PATH="rspec_flaky/skipped_flaky_tests_${report_name}_report.txt"
if [[ ! -f $FLAKY_RSPEC_REPORT_PATH ]]; then
echo "{}" > "${FLAKY_RSPEC_REPORT_PATH}"
@@ -172,6 +195,25 @@ function rspec_paralellized_job() {
date
}
+function rspec_rerun_previous_failed_tests() {
+ local test_file_count_threshold=${RSPEC_PREVIOUS_FAILED_TEST_FILE_COUNT_THRESHOLD:-10}
+ local matching_tests_file=${1}
+ local rspec_opts=${2}
+ local test_files="$(cat "${matching_tests_file}")"
+ local test_file_count=$(wc -w "${matching_tests_file}" | awk {'print $1'})
+
+ if [[ "${test_file_count}" -gt "${test_file_count_threshold}" ]]; then
+ echo "This job is intentionally exited because there are more than ${test_file_count_threshold} test files to rerun."
+ exit 0
+ fi
+
+ if [[ -n $test_files ]]; then
+ rspec_simple_job "${test_files}"
+ else
+ echo "No failed test files to rerun"
+ fi
+}
+
function rspec_fail_fast() {
local test_file_count_threshold=${RSPEC_FAIL_FAST_TEST_FILE_COUNT_THRESHOLD:-10}
local matching_tests_file=${1}
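
Taken together, the new helpers are meant to be chained inside a CI job; a sketch under the assumption that the job sources this file and targets the rspec pg12 suites (the failed-files path follows failed_tests.rb's output naming):

    source scripts/rspec_helpers.sh
    retrieve_previous_failed_tests "tmp/previous_failed_tests/" "rspec .+ pg12( .+)?" "rspec-ee .+ pg12( .+)?"
    rspec_rerun_previous_failed_tests "tmp/previous_failed_tests/rspec_failed_files.txt" ""
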
diff --git a/scripts/schema_changed.sh b/scripts/schema_changed.sh
index f564f717e95..2fcb85df75b 100755
--- a/scripts/schema_changed.sh
+++ b/scripts/schema_changed.sh
@@ -1,25 +1,21 @@
#!/bin/sh
-schema_changed() {
- if [ ! -z "$(git diff --name-only -- db/structure.sql)" ]; then
- printf "Schema changes are not cleanly committed to db/structure.sql\n"
- printf "The diff is as follows:\n"
- diff=$(git diff -p --binary -- db/structure.sql)
- printf "%s" "$diff"
- exit 1
- else
- printf "Schema changes are correctly applied to db/structure.sql\n"
- fi
+if [ -n "$(git diff --name-only -- db/structure.sql)" ]; then
+ printf "Schema changes are not cleanly committed to db/structure.sql\n"
+ printf "The diff is as follows:\n"
+ diff=$(git diff -p --binary -- db/structure.sql)
+ printf "%s" "$diff"
+ exit 1
+else
+ printf "Schema changes are correctly applied to db/structure.sql\n"
+fi
- if [ ! -z "$(git add -A -n db/schema_migrations)" ]; then
- printf "Schema version files have not been committed to the repository:\n"
- printf "The following files should be committed:\n"
- diff=$(git add -A -n db/schema_migrations)
- printf "%s" "$diff"
- exit 2
- else
- printf "Schema changes are correctly applied to db/structure.sql and db/schema_migrations/\n"
- fi
-}
-
-schema_changed
+if [ -n "$(git add -A -n db/schema_migrations)" ]; then
+ printf "Schema version files have not been committed to the repository:\n"
+ printf "The following files should be committed:\n"
+ diff=$(git add -A -n db/schema_migrations)
+ printf "%s" "$diff"
+ exit 2
+else
+ printf "Schema changes are correctly applied to db/structure.sql and db/schema_migrations/\n"
+fi
diff --git a/scripts/security-harness b/scripts/security-harness
index ec062fc17cc..df499be23f5 100755
--- a/scripts/security-harness
+++ b/scripts/security-harness
@@ -4,7 +4,6 @@
require 'digest'
require 'fileutils'
-require 'open3'
if ENV['NO_COLOR']
SHELL_RED = ''
@@ -19,7 +18,7 @@ else
end
LEFTHOOK_GLOBAL_CONFIG_PATH = File.expand_path("../lefthook.yml", __dir__)
-HOOK_PATH = Open3.capture3("git rev-parse --path-format=absolute --git-path hooks/pre-push")[0].strip
+HOOK_PATH = `git rev-parse --path-format=absolute --git-path hooks/pre-push`.split.last
HOOK_DATA = <<~HOOK
#!/usr/bin/env bash
diff --git a/scripts/static-analysis b/scripts/static-analysis
index f50e4a24b58..9c6a948adc1 100755
--- a/scripts/static-analysis
+++ b/scripts/static-analysis
@@ -43,13 +43,7 @@ class StaticAnalysis
# contain values that a FOSS installation won't find. To work
# around this we will only enable this task on EE installations.
TASKS_WITH_DURATIONS_SECONDS = [
- Task.new(%w[bin/rake lint:haml], 562),
- # We need to disable the cache for this cop since it creates files under tmp/feature_flags/*.used,
- # the cache would prevent these files from being created.
- Task.new(%w[bundle exec rubocop --only Gitlab/MarkUsedFeatureFlags --cache false], 400),
(Gitlab.ee? ? Task.new(%w[bin/rake gettext:updated_check], 360) : nil),
- Task.new(%w[yarn run lint:eslint:all], 312),
- Task.new(%w[bundle exec rubocop --parallel], 60),
Task.new(%w[yarn run lint:prettier], 160),
Task.new(%w[bin/rake gettext:lint], 85),
Task.new(%W[bundle exec license_finder --decisions-file config/dependency_decisions.yml --project-path #{project_path}], 20),
diff --git a/scripts/trigger-build b/scripts/trigger-build
index 5af45ec09f2..e5fa55f8582 100755
--- a/scripts/trigger-build
+++ b/scripts/trigger-build
@@ -154,7 +154,8 @@ module Trigger
'SECURITY_SOURCES' => Trigger.security? ? 'true' : 'false',
'ee' => Trigger.ee? ? 'true' : 'false',
'QA_BRANCH' => ENV['QA_BRANCH'] || 'master',
- 'CACHE_UPDATE' => ENV['OMNIBUS_GITLAB_CACHE_UPDATE']
+ 'CACHE_UPDATE' => ENV['OMNIBUS_GITLAB_CACHE_UPDATE'],
+ 'GITLAB_QA_OPTIONS' => ENV['GITLAB_QA_OPTIONS']
}
end
end
diff --git a/scripts/used-feature-flags b/scripts/used-feature-flags
index e6a8149da71..7d81e4b2cb2 100755
--- a/scripts/used-feature-flags
+++ b/scripts/used-feature-flags
@@ -27,7 +27,8 @@ flags_paths = [
]
# For EE additionally process `ee/` feature flags
-if File.exist?('ee/app/models/license.rb') && !%w[true 1].include?(ENV['FOSS_ONLY'].to_s)
+is_ee = File.exist?('ee/app/models/license.rb') && !%w[true 1].include?(ENV['FOSS_ONLY'].to_s)
+if is_ee
flags_paths << 'ee/config/feature_flags/**/*.yml'
# Geo feature flags are constructed dynamically and there's no explicit checks in the codebase so we mark all
@@ -41,6 +42,19 @@ if File.exist?('ee/app/models/license.rb') && !%w[true 1].include?(ENV['FOSS_ONL
end
end
+# For JH additionally process `jh/` feature flags
+is_jh = is_ee && Dir.exist?('jh') && !%w[true 1].include?(ENV['EE_ONLY'].to_s)
+if is_jh
+ flags_paths << 'jh/config/feature_flags/**/*.yml'
+
+ Dir.glob('jh/app/replicators/geo/*_replicator.rb').each_with_object(Set.new) do |path, memo|
+ replicator_name = File.basename(path, '.rb')
+ feature_flag_name = "geo_#{replicator_name.delete_suffix('_replicator')}_replication"
+
+ FileUtils.touch(File.join('tmp', 'feature_flags', "#{feature_flag_name}.used"))
+ end
+end
+
all_flags = {}
additional_flags = Set.new
diff --git a/scripts/utils.sh b/scripts/utils.sh
index faabc151963..ed27edcadb2 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -172,5 +172,5 @@ function danger_as_local() {
# Force danger to skip CI source GitLab and fallback to "local only git repo".
unset GITLAB_CI
# We need the base SHA to help danger determine the base commit for this shallow clone.
- bundle exec danger dry_run --fail-on-errors=true --verbose --base="${CI_MERGE_REQUEST_DIFF_BASE_SHA}"
+ bundle exec danger dry_run --fail-on-errors=true --verbose --base="${CI_MERGE_REQUEST_DIFF_BASE_SHA}" --head="${CI_MERGE_REQUEST_SOURCE_BRANCH_SHA:-$CI_COMMIT_SHA}"
}
diff --git a/scripts/verify-tff-mapping b/scripts/verify-tff-mapping
index ee86f9ecde5..b09fd09a737 100755
--- a/scripts/verify-tff-mapping
+++ b/scripts/verify-tff-mapping
@@ -102,8 +102,8 @@ tests = [
{
explanation: 'Migration should map to its non-timestamped spec',
- source: 'db/migrate/20200116175538_update_timestamp_softwarelicensespolicy.rb',
- expected: ['spec/migrations/update_timestamp_softwarelicensespolicy_spec.rb']
+ source: 'db/migrate/20210818220234_add_default_project_approval_rules_vuln_allowed.rb',
+ expected: ['spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb']
},
{