Welcome to the mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2022-02-18 12:45:46 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2022-02-18 12:45:46 +0300
commita7b3560714b4d9cc4ab32dffcd1f74a284b93580 (patch)
tree7452bd5c3545c2fa67a28aa013835fb4fa071baf /scripts
parentee9173579ae56a3dbfe5afe9f9410c65bb327ca7 (diff)
Add latest changes from gitlab-org/gitlab@14-8-stable-eev14.8.0-rc42
Diffstat (limited to 'scripts')
-rwxr-xr-xscripts/decomposition/generate-loose-foreign-key405
-rwxr-xr-xscripts/gather-test-memory-data22
-rwxr-xr-xscripts/generate-gems-memory-metrics-static19
-rwxr-xr-xscripts/generate-gems-size-metrics-static31
-rwxr-xr-xscripts/insert-rspec-profiling-data2
-rwxr-xr-xscripts/lint-doc.sh2
-rwxr-xr-xscripts/rspec_bisect_flaky10
-rw-r--r--scripts/rspec_helpers.sh156
-rwxr-xr-xscripts/setup/find-jh-branch.rb2
-rwxr-xr-xscripts/trigger-build.rb (renamed from scripts/trigger-build)100
-rw-r--r--scripts/utils.sh13
11 files changed, 605 insertions, 157 deletions
diff --git a/scripts/decomposition/generate-loose-foreign-key b/scripts/decomposition/generate-loose-foreign-key
new file mode 100755
index 00000000000..35f84c64ce1
--- /dev/null
+++ b/scripts/decomposition/generate-loose-foreign-key
@@ -0,0 +1,405 @@
+#!/usr/bin/env -S ENABLE_SPRING=0 bin/rails runner -e test
+
+# This is a helper script to swap a foreign key to a loose foreign key
+# using the DB schema
+
+require 'optparse'
+
+$options = {
+ milestone: "#{Gitlab.version_info.major}.#{Gitlab.version_info.minor}",
+ cross_schema: false,
+ dry_run: false,
+ branch: true,
+ rspec: true
+}
+
+OptionParser.new do |opts|
+ opts.banner = "Usage: #{$0} [options] <filters...>"
+
+ opts.on("-c", "--cross-schema", "Show only cross-schema foreign keys") do |v|
+ $options[:cross_schema] = v
+ end
+
+ opts.on("-n", "--dry-run", "Do not execute any commands (dry run)") do |v|
+ $options[:dry_run] = v
+ end
+
+ opts.on("-b", "--[no-]branch", "Create or not a new branch") do |v|
+ $options[:branch] = v
+ end
+
+ opts.on("-r", "--[no-]rspec", "Create or not a rspecs automatically") do |v|
+ $options[:rspec] = v
+ end
+
+ opts.on("-m", "--milestone MILESTONE", "Specify custom milestone (current: #{$options[:milestone]})") do |v|
+ $options[:milestone] = v
+ end
+
+ opts.on("-h", "--help", "Prints this help") do
+ puts opts
+ exit
+ end
+end.parse!
+
+unless system("git diff --quiet db/structure.sql")
+ raise "The db/structure.sql is changed. Reset branch or commit changes."
+end
+
+unless system("git diff --quiet")
+ raise "There are uncommitted changes. Commit to continue."
+end
+
+if Gitlab::Database.database_base_models.many?
+ raise 'Cannot run in multiple-databases mode. Use only `main:` in `config/database.yml`.'
+end
+
+puts "Re-creating current test database"
+ActiveRecord::Tasks::DatabaseTasks.drop_current
+ActiveRecord::Tasks::DatabaseTasks.create_current
+ActiveRecord::Tasks::DatabaseTasks.load_schema_current
+ActiveRecord::Tasks::DatabaseTasks.migrate
+ActiveRecord::Migration.check_pending!
+ActiveRecord::Base.connection_pool.disconnect!
+puts
+
+def exec_cmd(*args, fail: nil)
+ # output full command
+ if $options[:dry_run]
+ puts ">> #{args.shelljoin}"
+ return true
+ end
+
+ # truncate to the first line, capped at 120 chars
+ command = args.shelljoin
+ truncated_command = command.truncate([command.lines.first.length+3, 120].min)
+
+ puts ">> #{truncated_command}"
+ return true if system(*args)
+
+ raise fail if fail
+
+ puts "--------------------------------------------------"
+ puts "This command failed:"
+ puts ">> #{command}"
+ puts "--------------------------------------------------"
+ false
+end
+
+def has_lfk?(definition)
+ Gitlab::Database::LooseForeignKeys.definitions.any? do |lfk_definition|
+ lfk_definition.from_table == definition.from_table &&
+ lfk_definition.to_table == definition.to_table &&
+ lfk_definition.column == definition.column
+ end
+end
+
+def matching_filter?(definition, filters)
+ filters.all? do |filter|
+ definition.from_table.include?(filter) ||
+ definition.to_table.include?(filter) ||
+ definition.column.include?(filter)
+ end
+end
+
+def columns(*args)
+ puts("%5s | %7s | %40s | %20s | %30s | %15s " % args)
+end
+
+def add_definition_to_yaml(definition)
+ content = YAML.load_file(Rails.root.join('config/gitlab_loose_foreign_keys.yml'))
+ table_definitions = content[definition.from_table]
+
+ # insert new entry at random place to avoid conflicts
+ unless table_definitions
+ table_definitions = []
+ insert_idx = rand(content.count+1)
+
+ # insert at a given index in ordered hash
+ content = content.to_a
+ content.insert(insert_idx, [definition.from_table, table_definitions])
+ content = content.to_h
+ end
+
+ on_delete =
+ case definition.on_delete
+ when :cascade
+ 'async_delete'
+ when :nullify
+ 'async_nullify'
+ else
+ raise "Unsupported on_delete behavior: #{definition.on_delete}"
+ end
+
+ yaml_definition = {
+ "table" => definition.to_table,
+ "column" => definition.column,
+ "on_delete" => on_delete
+ }
+
+ # match and update by "table", "column"
+ if existing = table_definitions.pluck("table", "column").index([definition.to_table, definition.column])
+ puts "Updated existing definition from #{table_definitions[existing]} to #{yaml_definition}."
+ table_definitions[existing] = yaml_definition
+ else
+ puts "Add new definition for #{yaml_definition}."
+ table_definitions.append(yaml_definition)
+ end
+
+ # emulate existing formatting
+ File.write(
+ Rails.root.join('config/gitlab_loose_foreign_keys.yml'),
+ content.to_yaml.gsub(/^([- ] )/, ' \1')
+ )
+
+ exec_cmd("git", "add", "config/gitlab_loose_foreign_keys.yml")
+end
+
+def generate_migration(definition)
+ timestamp = Time.now.utc.strftime("%Y%m%d%H%M%S")
+
+ # db/post_migrate/20220111221516_remove_projects_ci_pending_builds_fk.rb
+
+ migration_name = "db/post_migrate/#{timestamp}_remove_#{definition.to_table}_#{definition.from_table}_#{definition.column}_fk.rb"
+ puts "Writing #{migration_name}"
+
+ content = <<-EOF.strip_heredoc
+ # frozen_string_literal: true
+
+ class Remove#{definition.to_table.camelcase}#{definition.from_table.camelcase}#{definition.column.camelcase}Fk < Gitlab::Database::Migration[1.0]
+ disable_ddl_transaction!
+
+ def up
+ return unless foreign_key_exists?(:#{definition.from_table}, :#{definition.to_table}, name: "#{definition.name}")
+
+ with_lock_retries do
+ execute('LOCK #{definition.to_table}, #{definition.from_table} IN ACCESS EXCLUSIVE MODE') if transaction_open?
+
+ remove_foreign_key_if_exists(:#{definition.from_table}, :#{definition.to_table}, name: "#{definition.name}")
+ end
+ end
+
+ def down
+ add_concurrent_foreign_key(:#{definition.from_table}, :#{definition.to_table}, name: "#{definition.name}", column: :#{definition.column}, target_column: :#{definition.primary_key}, on_delete: :#{definition.on_delete})
+ end
+ end
+ EOF
+
+ File.write(migration_name, content)
+
+ exec_cmd("git", "add", migration_name, fail: "Failed to add migration file.")
+ exec_cmd("bin/rails", "db:migrate", fail: "Failed to run db:migrate.")
+ exec_cmd("git", "add", "db/schema_migrations/#{timestamp}", "db/structure.sql", fail: "There are uncommitted changes. We should not have any.")
+ exec_cmd("git diff --exit-code --name-only", fail: "There are uncommitted changes. We should not have any.")
+end
+
+def class_by_table_name
+ @index_by_table_name ||= ActiveRecord::Base
+ .descendants
+ .reject(&:abstract_class)
+ .map(&:base_class)
+ .index_by(&:table_name)
+end
+
+def spec_from_clazz(clazz, definition)
+ %w[spec/models ee/spec/models].each do |specs_path|
+ path = File.join(specs_path, clazz.underscore + "_spec.rb")
+ return path if File.exist?(path)
+ end
+
+ raise "Cannot find specs for #{clazz} (#{definition.from_table})"
+end
+
+def add_test_to_specs(definition)
+ return unless $options[:rspec]
+
+ clazz = class_by_table_name[definition.from_table]
+ raise "Cannot map #{definition.from_table} to clazz" unless clazz
+
+ spec_path = spec_from_clazz(clazz, definition)
+ puts "Adding test to #{spec_path}..."
+
+ spec_test = <<-EOF.strip_heredoc.indent(2)
+ context 'loose foreign key on #{definition.from_table}.#{definition.column}' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:parent) { create(:#{definition.to_table.singularize}) }
+ let!(:model) { create(:#{definition.from_table.singularize}, #{definition.column.delete_suffix("_id").singularize}: parent) }
+ end
+ end
+ EOF
+
+ # append to end of file with empty line before
+ lines = File.readlines(spec_path)
+ insert_line = lines.count - 1
+ lines.insert(insert_line, "\n", *spec_test.lines)
+ File.write(spec_path, lines.join(""))
+
+ # find a matching line
+ test_lines = (1..lines.count).select do |line|
+ lines[line-1].include?("it_behaves_like 'cleanup by a loose foreign key' do")
+ end.join(":")
+
+ loop do
+ if system("bin/rspec", "#{spec_path}:#{test_lines}")
+ puts "Test seems fine?"
+ break
+ end
+
+ puts "--------------------------------------------------"
+ puts "Test failed:"
+ puts "Edit: vim #{spec_path} (lines #{test_lines})"
+ puts "Re-run: bin/rspec #{spec_path}:#{test_lines}"
+ puts "--------------------------------------------------"
+ puts "Running bash. To exit do 'Ctrl-D' to re-run, or do 'Ctrl-C' to break (and ignore failure)."
+ puts
+
+ unless exec_cmd("bash")
+ break
+ end
+ end
+
+ exec_cmd("git", "add", spec_path, fail: "There are uncommitted changes. We should not have any.")
+end
+
+def update_no_cross_db_foreign_keys_spec(definition)
+ from_column = "#{definition.from_table}.#{definition.column}"
+ spec_path = "spec/lib/gitlab/database/no_cross_db_foreign_keys_spec.rb"
+
+ puts "Updating #{spec_path}..."
+ lines = File.readlines(spec_path)
+ updated = lines.reject { |line| line.strip == from_column }
+
+ if lines.count == updated.count
+ puts "Nothing changed."
+ return
+ end
+
+ File.write(spec_path, updated.join(""))
+ exec_cmd("git", "add", spec_path, fail: "Failed to add changes from #{spec_path}")
+end
+
+def commit_changes(definition)
+ branch_name = "remove-#{definition.to_table}_#{definition.from_table}_#{definition.column}-fk"
+ commit_title = "Swap FK #{definition.from_table} to #{definition.to_table} for LFK"
+ mr_title = "Swap FK #{definition.from_table}.#{definition.column} to #{definition.to_table} for LFK"
+ description = <<-EOF.strip_heredoc
+ Swaps FK for #{definition.from_table}.#{definition.column} to #{definition.to_table}
+
+ Changelog: changed
+ EOF
+
+ commit_message = "#{commit_title}\n\n#{description}"
+
+ existing_branch = %x[git rev-parse --abbrev-ref HEAD].strip
+
+ if $options[:branch]
+ unless exec_cmd("git", "checkout", "-b", branch_name)
+ raise "Failed to create branch: #{branch_name}"
+ end
+ end
+
+ unless exec_cmd("git", "commit", "-m", commit_message)
+ raise "Failed to commit changes."
+ end
+
+ if $options[:branch]
+ exec_cmd("git", "push", "origin", "-u", "HEAD",
+ "-o", "merge_request.create",
+ "-o", "merge_request.target=#{existing_branch}",
+ "-o", "merge_request.milestone=#{$options[:milestone]}",
+ "-o", "merge_request.title=#{mr_title}"
+ )
+
+ puts
+ puts "--------------------------------------------------"
+ puts "Put this as MR description:"
+ puts "--------------------------------------------------"
+ puts <<-EOF.strip_heredoc
+ ## What does this MR do and why?
+
+ Per https://gitlab.com/groups/gitlab-org/-/epics/7249
+
+ As part of our CI "decomposition" efforts we need to remove all foreign keys that are cross-database (ie. between the planned \`main\` and \`ci\` databases). We are going to replace them all with ["loose foreign keys"](https://docs.gitlab.com/ee/development/database/loose_foreign_keys.html).
+
+ Related: <DETAIL>
+
+ ## Validations
+
+ - **Best team to review (check off when reviewed):** TBD
+ - [ ] No way for user to access once parent is deleted. Please explain: <DETAIL>
+ - [ ] Possible to access once parent deleted but low user impact. Please explain: <DETAIL>
+ - [ ] Possible Sidekiq workers that may load directly and possibly lead to exceptions. Please explain: <DETAIL>
+ - [ ] Possible user impact to be evaluated or mitigated. Please explain: <DETAIL>
+ - [ ] Is this FK safe to be removed to avoid LOCKing problems? (Explanation: https://gitlab.com/groups/gitlab-org/-/epics/7249#note_819662046). Please explain: <DETAIL>
+
+ ## MR acceptance checklist
+
+ This checklist encourages us to confirm any changes have been analyzed to reduce risks in quality, performance, reliability, security, and maintainability.
+
+ * [ ] I have evaluated the [MR acceptance checklist](https://docs.gitlab.com/ee/development/code_review.html#acceptance-checklist) for this MR.
+
+ /label ~"ci-decomposition::phase4" ~"database::review pending" ~"devops::enablement" ~"group::sharding" ~"section::enablement" ~"sharding::active" ~"type::feature" ~"workflow::in dev" ~backend ~"ci-decomposition" ~database ~"Category:Sharding"
+ /milestone %"#{$options[:milestone]}"
+ /assign_reviewer @ahegyi
+ EOF
+ puts "--------------------------------------------------"
+ end
+end
+
+all_foreign_keys = ActiveRecord::Base.connection.tables.flat_map do |table|
+ ActiveRecord::Base.connection.foreign_keys(table)
+end
+
+# Show only cross-schema foreign keys
+if $options[:cross_schema]
+ all_foreign_keys.select! do |definition|
+ Gitlab::Database::GitlabSchema.table_schema(definition.from_table) != Gitlab::Database::GitlabSchema.table_schema(definition.to_table)
+ end
+end
+
+if $options[:cross_schema]
+ puts "Showing cross-schema foreign keys (#{all_foreign_keys.count}):"
+else
+ puts "Showing all foreign keys (#{all_foreign_keys.count}):"
+ puts "Did you meant `#{$0} --cross-schema ...`?"
+end
+
+columns("ID", "HAS_LFK", "FROM", "TO", "COLUMN", "ON_DELETE")
+all_foreign_keys.each_with_index do |definition, idx|
+ columns(idx, has_lfk?(definition) ? 'Y' : 'N', definition.from_table, definition.to_table, definition.column, definition.on_delete)
+end
+puts
+
+puts "To match FK write one or many filters to match against FROM/TO/COLUMN:"
+puts "- #{$0} <filter(s)...>"
+puts "- #{$0} ci_job_artifacts project_id"
+puts "- #{$0} dast_site_profiles_pipelines"
+puts
+
+return if ARGV.empty?
+
+puts "Loading all models..."
+# Fix bug with loading `app/models/identity/uniqueness_scopes.rb`
+require_relative Rails.root.join('app/models/identity.rb')
+
+%w[app/models/**/*.rb ee/app/models/**/*.rb].each do |filter|
+ Dir.glob(filter).each do |path|
+ require_relative Rails.root.join(path)
+ end
+end
+puts
+
+puts "Generating Loose Foreign Key for given filters: #{ARGV}"
+
+all_foreign_keys.each_with_index do |definition, idx|
+ next unless matching_filter?(definition, ARGV)
+
+ puts "Matched: #{idx} (#{definition.from_table}, #{definition.to_table}, #{definition.column})"
+
+ add_definition_to_yaml(definition)
+ generate_migration(definition)
+ add_test_to_specs(definition)
+ update_no_cross_db_foreign_keys_spec(definition)
+ commit_changes(definition)
+end
+puts
diff --git a/scripts/gather-test-memory-data b/scripts/gather-test-memory-data
deleted file mode 100755
index 3156365ac19..00000000000
--- a/scripts/gather-test-memory-data
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-require 'csv'
-
-def join_csv_files(output_path, input_paths)
- return if input_paths.empty?
-
- input_csvs = input_paths.map do |input_path|
- CSV.read(input_path, headers: true)
- end
-
- CSV.open(output_path, "w", headers: input_csvs.first.headers, write_headers: true) do |output_csv|
- input_csvs.each do |input_csv|
- input_csv.each do |line|
- output_csv << line
- end
- end
- end
-end
-
-join_csv_files('tmp/memory_test/report.csv', Dir['tmp/memory_test/*.csv'].sort)
diff --git a/scripts/generate-gems-memory-metrics-static b/scripts/generate-gems-memory-metrics-static
deleted file mode 100755
index 42191f078f1..00000000000
--- a/scripts/generate-gems-memory-metrics-static
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-abort "usage: #{__FILE__} <memory_bundle_objects_file_name>" unless ARGV.length == 1
-memory_bundle_objects_file_name = ARGV.first
-
-full_report = File.readlines(memory_bundle_objects_file_name)
-
-allocated_str = full_report[1]
-retained_str = full_report[2]
-allocated_stats = /Total allocated: (?<bytes>.*) bytes \((?<objects>.*) objects\)/.match(allocated_str)
-retained_stats = /Total retained: (?<bytes>.*) bytes \((?<objects>.*) objects\)/.match(retained_str)
-
-abort 'failed to process the benchmark output' unless allocated_stats && retained_stats
-
-puts "memory_static_objects_allocated_mb #{(allocated_stats[:bytes].to_f / (1024 * 1024)).round(1)}"
-puts "memory_static_objects_retained_mb #{(retained_stats[:bytes].to_f / (1024 * 1024)).round(1)}"
-puts "memory_static_objects_allocated_items #{allocated_stats[:objects]}"
-puts "memory_static_objects_retained_items #{retained_stats[:objects]}"
diff --git a/scripts/generate-gems-size-metrics-static b/scripts/generate-gems-size-metrics-static
deleted file mode 100755
index 2406e720916..00000000000
--- a/scripts/generate-gems-size-metrics-static
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/usr/bin/env ruby
-# frozen_string_literal: true
-
-abort "usage: #{__FILE__} <memory_bundle_mem_file_name>" unless ARGV.length == 1
-memory_bundle_mem_file_name = ARGV.first
-
-full_report = File.readlines(memory_bundle_mem_file_name)
-
-def total_size(memory_bundle_mem_report)
- stats = /TOP: (?<total_mibs_str>.*) MiB/.match(memory_bundle_mem_report.first)
- abort 'failed to process the benchmark output' unless stats
- "gem_total_size_mb #{stats[:total_mibs_str].to_f.round(1)}"
-end
-
-TOP_LEVEL_GEM_LOG_FORMAT = /^ (?<gem_name>\S.*):\s*(?<gem_size>\d[.\d]*)\s*MiB/.freeze
-def all_gems(memory_bundle_mem_report)
- memory_bundle_mem_report.map do |line|
- TOP_LEVEL_GEM_LOG_FORMAT.match(line)
- end.compact
-end
-
-def gems_as_metrics(gems_match_data)
- gems_match_data.map do |gem|
- gem_name = gem[:gem_name]
- gem_size_mb = gem[:gem_size].to_f.round(1)
- "gem_size_mb{name=\"#{gem_name}\"} #{gem_size_mb}"
- end
-end
-
-puts total_size(full_report)
-puts gems_as_metrics(all_gems(full_report)).sort(&:casecmp)
diff --git a/scripts/insert-rspec-profiling-data b/scripts/insert-rspec-profiling-data
index be25972644c..996ad78ba5f 100755
--- a/scripts/insert-rspec-profiling-data
+++ b/scripts/insert-rspec-profiling-data
@@ -43,4 +43,4 @@ def insert_data(path)
end
end
-insert_data('rspec_profiling') if ENV['RSPEC_PROFILING_POSTGRES_URL'].present?
+insert_data(ENV['RSPEC_PROFILING_FOLDER_PATH']) if ENV['RSPEC_PROFILING_POSTGRES_URL'].present?
diff --git a/scripts/lint-doc.sh b/scripts/lint-doc.sh
index a036b3f7342..aba815cdf28 100755
--- a/scripts/lint-doc.sh
+++ b/scripts/lint-doc.sh
@@ -128,7 +128,7 @@ function run_locally_or_in_docker() {
$cmd $args
elif hash docker 2>/dev/null
then
- docker run -t -v ${PWD}:/gitlab -w /gitlab --rm registry.gitlab.com/gitlab-org/gitlab-docs/lint-markdown:alpine-3.15-vale-2.14.0-markdownlint-0.30.0 ${cmd} ${args}
+ docker run -t -v ${PWD}:/gitlab -w /gitlab --rm registry.gitlab.com/gitlab-org/gitlab-docs/lint-markdown:alpine-3.15-vale-2.15.0-markdownlint-0.31.0 ${cmd} ${args}
else
echo
echo " ✖ ERROR: '${cmd}' not found. Install '${cmd}' or Docker to proceed." >&2
diff --git a/scripts/rspec_bisect_flaky b/scripts/rspec_bisect_flaky
index efeb9bcb5a0..2ef6dedb4c2 100755
--- a/scripts/rspec_bisect_flaky
+++ b/scripts/rspec_bisect_flaky
@@ -10,20 +10,20 @@ if [ $# -eq 0 ]; then
exit
fi
-files=( $@ )
+files=( "$@" )
len=${#files[@]}
target=${files[$len-1]}
# Trap interrupts and exit instead of continuing the loop
trap "echo Exited!; exit 2;" SIGINT SIGTERM
-# Show which set of specs are running
-set -x
+# Show which set of specs are running and exit immediately if they fail.
+set -xe
# Do the speedy case first, run each spec with our failing spec
for file in "${files[@]}"; do
- bin/rspec $file $target
+ bin/rspec "$file" "$target"
done
# Do a full bisect given we did not find candidates with speedy cases
-bin/rspec --bisect=verbose $@
+bin/rspec --bisect=verbose "$@"
diff --git a/scripts/rspec_helpers.sh b/scripts/rspec_helpers.sh
index 2a6eb91a1f3..af09d6d0edd 100644
--- a/scripts/rspec_helpers.sh
+++ b/scripts/rspec_helpers.sh
@@ -1,15 +1,18 @@
#!/usr/bin/env bash
function retrieve_tests_metadata() {
- mkdir -p $(dirname "$KNAPSACK_RSPEC_SUITE_REPORT_PATH") $(dirname "$FLAKY_RSPEC_SUITE_REPORT_PATH") rspec_profiling/
+ mkdir -p $(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}") $(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}") "${RSPEC_PROFILING_FOLDER_PATH}"
if [[ -n "${RETRIEVE_TESTS_METADATA_FROM_PAGES}" ]]; then
if [[ ! -f "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ]]; then
- curl --location -o "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
+ curl --location -o "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ||
+ echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
fi
if [[ ! -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then
- curl --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FLAKY_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
+ curl --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/${FLAKY_RSPEC_SUITE_REPORT_PATH}" ||
+ curl --location -o "${FLAKY_RSPEC_SUITE_REPORT_PATH}" "https://gitlab-org.gitlab.io/gitlab/rspec_flaky/report-suite.json" || # temporary back-compat
+ echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
fi
else
# ${CI_DEFAULT_BRANCH} might not be master in other forks but we want to
@@ -31,7 +34,14 @@ function retrieve_tests_metadata() {
fi
if [[ ! -f "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ]]; then
- scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "${FLAKY_RSPEC_SUITE_REPORT_PATH}" || echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
+ scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ||
+ scripts/api/download_job_artifact.rb --endpoint "https://gitlab.com/api/v4" --project "${project_path}" --job-id "${test_metadata_job_id}" --artifact-path "rspec_flaky/report-suite.json" || # temporary back-compat
+ echo "{}" > "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
+
+ # temporary back-compat
+ if [[ -f "rspec_flaky/report-suite.json" ]]; then
+ mv "rspec_flaky/report-suite.json" "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
+ fi
fi
else
echo "test_metadata_job_id couldn't be found!"
@@ -42,21 +52,24 @@ function retrieve_tests_metadata() {
}
function update_tests_metadata() {
+ local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/"
+ local knapsack_folder_path="$(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}")/"
+
echo "{}" > "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}"
- scripts/merge-reports "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" knapsack/rspec*.json
- rm -f knapsack/rspec*.json
+ scripts/merge-reports "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" ${knapsack_folder_path}rspec*.json
export FLAKY_RSPEC_GENERATE_REPORT="true"
- scripts/merge-reports "${FLAKY_RSPEC_SUITE_REPORT_PATH}" rspec_flaky/all_*.json
+ scripts/merge-reports "${FLAKY_RSPEC_SUITE_REPORT_PATH}" ${rspec_flaky_folder_path}all_*.json
scripts/flaky_examples/prune-old-flaky-examples "${FLAKY_RSPEC_SUITE_REPORT_PATH}"
- rm -f rspec_flaky/all_*.json rspec_flaky/new_*.json
if [[ "$CI_PIPELINE_SOURCE" == "schedule" ]]; then
scripts/insert-rspec-profiling-data
else
echo "Not inserting profiling data as the pipeline is not a scheduled one."
fi
+
+ cleanup_individual_job_reports
}
function retrieve_tests_mapping() {
@@ -158,12 +171,20 @@ function retrieve_previous_failed_tests() {
scripts/failed_tests.rb --previous-tests-report-path "${pipeline_report_path}" --output-directory "${directory_for_output_reports}" --rspec-pg-regex "${rspec_pg_regex}" --rspec-ee-pg-regex "${rspec_ee_pg_regex}"
}
-function rspec_simple_job() {
+function rspec_args() {
local rspec_opts="${1}"
+ local junit_report_file="${2:-${JUNIT_RESULT_FILE}}"
+
+ echo "-Ispec -rspec_helper --color --format documentation --format RspecJunitFormatter --out ${junit_report_file} ${rspec_opts}"
+}
+function rspec_simple_job() {
export NO_KNAPSACK="1"
- eval "bin/rspec -Ispec -rspec_helper --color --format documentation --format RspecJunitFormatter --out junit_rspec.xml ${rspec_opts}"
+ local rspec_cmd="bin/rspec $(rspec_args "${1}" "${2}")"
+ echoinfo "Running RSpec command: ${rspec_cmd}"
+
+ eval "${rspec_cmd}"
}
function rspec_db_library_code() {
@@ -172,6 +193,26 @@ function rspec_db_library_code() {
rspec_simple_job "-- ${db_files}"
}
+function debug_rspec_variables() {
+ echoinfo "SKIP_FLAKY_TESTS_AUTOMATICALLY: ${SKIP_FLAKY_TESTS_AUTOMATICALLY}"
+ echoinfo "RETRY_FAILED_TESTS_IN_NEW_PROCESS: ${RETRY_FAILED_TESTS_IN_NEW_PROCESS}"
+
+ echoinfo "KNAPSACK_GENERATE_REPORT: ${KNAPSACK_GENERATE_REPORT}"
+ echoinfo "FLAKY_RSPEC_GENERATE_REPORT: ${FLAKY_RSPEC_GENERATE_REPORT}"
+
+ echoinfo "KNAPSACK_TEST_FILE_PATTERN: ${KNAPSACK_TEST_FILE_PATTERN}"
+ echoinfo "KNAPSACK_LOG_LEVEL: ${KNAPSACK_LOG_LEVEL}"
+ echoinfo "KNAPSACK_REPORT_PATH: ${KNAPSACK_REPORT_PATH}"
+
+ echoinfo "FLAKY_RSPEC_SUITE_REPORT_PATH: ${FLAKY_RSPEC_SUITE_REPORT_PATH}"
+ echoinfo "FLAKY_RSPEC_REPORT_PATH: ${FLAKY_RSPEC_REPORT_PATH}"
+ echoinfo "NEW_FLAKY_RSPEC_REPORT_PATH: ${NEW_FLAKY_RSPEC_REPORT_PATH}"
+ echoinfo "SKIPPED_FLAKY_TESTS_REPORT_PATH: ${SKIPPED_FLAKY_TESTS_REPORT_PATH}"
+ echoinfo "RETRIED_TESTS_REPORT_PATH: ${RETRIED_TESTS_REPORT_PATH}"
+
+ echoinfo "CRYSTALBALL: ${CRYSTALBALL}"
+}
+
function rspec_paralellized_job() {
read -ra job_name <<< "${CI_JOB_NAME}"
local test_tool="${job_name[0]}"
@@ -179,6 +220,9 @@ function rspec_paralellized_job() {
local report_name=$(echo "${CI_JOB_NAME}" | sed -E 's|[/ ]|_|g') # e.g. 'rspec unit pg12 1/24' would become 'rspec_unit_pg12_1_24'
local rspec_opts="${1}"
local spec_folder_prefixes=""
+ local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/"
+ local knapsack_folder_path="$(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}")/"
+ local rspec_run_status=0
if [[ "${test_tool}" =~ "-ee" ]]; then
spec_folder_prefixes="'ee/'"
@@ -193,7 +237,7 @@ function rspec_paralellized_job() {
fi
export KNAPSACK_LOG_LEVEL="debug"
- export KNAPSACK_REPORT_PATH="knapsack/${report_name}_report.json"
+ export KNAPSACK_REPORT_PATH="${knapsack_folder_path}${report_name}_report.json"
# There's a bug where artifacts are sometimes not downloaded. Since specs can run without the Knapsack report, we can
# handle the missing artifact gracefully here. See https://gitlab.com/gitlab-org/gitlab/-/issues/212349.
@@ -203,21 +247,15 @@ function rspec_paralellized_job() {
cp "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}" "${KNAPSACK_REPORT_PATH}"
- if [[ -z "${KNAPSACK_TEST_FILE_PATTERN}" ]]; then
- pattern=$(ruby -r./tooling/quality/test_level.rb -e "puts Quality::TestLevel.new(${spec_folder_prefixes}).pattern(:${test_level})")
- export KNAPSACK_TEST_FILE_PATTERN="${pattern}"
- fi
-
- echo "KNAPSACK_TEST_FILE_PATTERN: ${KNAPSACK_TEST_FILE_PATTERN}"
- echo "SKIP_FLAKY_TESTS_AUTOMATICALLY: ${SKIP_FLAKY_TESTS_AUTOMATICALLY}"
+ export KNAPSACK_TEST_FILE_PATTERN=$(ruby -r./tooling/quality/test_level.rb -e "puts Quality::TestLevel.new(${spec_folder_prefixes}).pattern(:${test_level})")
+ export FLAKY_RSPEC_REPORT_PATH="${rspec_flaky_folder_path}all_${report_name}_report.json"
+ export NEW_FLAKY_RSPEC_REPORT_PATH="${rspec_flaky_folder_path}new_${report_name}_report.json"
+ export SKIPPED_FLAKY_TESTS_REPORT_PATH="${rspec_flaky_folder_path}skipped_flaky_tests_${report_name}_report.txt"
+ export RETRIED_TESTS_REPORT_PATH="${rspec_flaky_folder_path}retried_tests_${report_name}_report.txt"
if [[ -d "ee/" ]]; then
export KNAPSACK_GENERATE_REPORT="true"
export FLAKY_RSPEC_GENERATE_REPORT="true"
- export SUITE_FLAKY_RSPEC_REPORT_PATH="${FLAKY_RSPEC_SUITE_REPORT_PATH}"
- export FLAKY_RSPEC_REPORT_PATH="rspec_flaky/all_${report_name}_report.json"
- export NEW_FLAKY_RSPEC_REPORT_PATH="rspec_flaky/new_${report_name}_report.json"
- export SKIPPED_FLAKY_TESTS_REPORT_PATH="rspec_flaky/skipped_flaky_tests_${report_name}_report.txt"
if [[ ! -f $FLAKY_RSPEC_REPORT_PATH ]]; then
echo "{}" > "${FLAKY_RSPEC_REPORT_PATH}"
@@ -228,19 +266,52 @@ function rspec_paralellized_job() {
fi
fi
- mkdir -p tmp/memory_test
+ debug_rspec_variables
- export MEMORY_TEST_PATH="tmp/memory_test/${report_name}_memory.csv"
+ if [[ -n $RSPEC_TESTS_MAPPING_ENABLED ]]; then
+ tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" --filter "tmp/matching_tests.txt" || rspec_run_status=$?
+ else
+ tooling/bin/parallel_rspec --rspec_args "$(rspec_args "${rspec_opts}")" || rspec_run_status=$?
+ fi
- local rspec_args="-Ispec -rspec_helper --color --format documentation --format RspecJunitFormatter --out junit_rspec.xml ${rspec_opts}"
+ echoinfo "RSpec exited with ${rspec_run_status}."
- if [[ -n $RSPEC_TESTS_MAPPING_ENABLED ]]; then
- tooling/bin/parallel_rspec --rspec_args "${rspec_args}" --filter "tmp/matching_tests.txt"
+ # Experiment to retry failed examples in a new RSpec process: https://gitlab.com/gitlab-org/quality/team-tasks/-/issues/1148
+ if [[ $rspec_run_status -ne 0 ]]; then
+ if [[ "${RETRY_FAILED_TESTS_IN_NEW_PROCESS}" == "true" ]]; then
+ retry_failed_rspec_examples
+ rspec_run_status=$?
+ fi
else
- tooling/bin/parallel_rspec --rspec_args "${rspec_args}"
+ echosuccess "No examples to retry, congrats!"
fi
- date
+ exit $rspec_run_status
+}
+
+function retry_failed_rspec_examples() {
+ local rspec_run_status=0
+
+ # Keep track of the tests that are retried, later consolidated in a single file by the `rspec:flaky-tests-report` job
+ local failed_examples=$(grep " failed" ${RSPEC_LAST_RUN_RESULTS_FILE})
+ echo "${CI_JOB_URL}" > "${RETRIED_TESTS_REPORT_PATH}"
+ echo $failed_examples >> "${RETRIED_TESTS_REPORT_PATH}"
+
+ echoinfo "Retrying the failing examples in a new RSpec process..."
+
+ install_junit_merge_gem
+
+ # Disable Crystalball on retry to not overwrite the existing report
+ export CRYSTALBALL="false"
+
+ # Retry only the tests that failed on first try
+ rspec_simple_job "--only-failures --pattern \"${KNAPSACK_TEST_FILE_PATTERN}\"" "${JUNIT_RETRY_FILE}"
+ rspec_run_status=$?
+
+ # Merge the JUnit report from retry into the first-try report
+ junit_merge "${JUNIT_RETRY_FILE}" "${JUNIT_RESULT_FILE}"
+
+ exit $rspec_run_status
}
function rspec_rerun_previous_failed_tests() {
@@ -330,3 +401,30 @@ function generate_frontend_fixtures_mapping() {
rspec_simple_job "--pattern \"${pattern}\""
}
+
+function cleanup_individual_job_reports() {
+ local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/"
+ local knapsack_folder_path="$(dirname "${KNAPSACK_RSPEC_SUITE_REPORT_PATH}")/"
+
+ rm -rf ${knapsack_folder_path}rspec*.json \
+ ${rspec_flaky_folder_path}all_*.json \
+ ${rspec_flaky_folder_path}new_*.json \
+ ${rspec_flaky_folder_path}skipped_flaky_tests_*_report.txt \
+ ${rspec_flaky_folder_path}retried_tests_*_report.txt \
+ ${RSPEC_LAST_RUN_RESULTS_FILE} \
+ ${RSPEC_PROFILING_FOLDER_PATH}/**/*
+ rmdir ${RSPEC_PROFILING_FOLDER_PATH} || true
+}
+
+function generate_flaky_tests_reports() {
+ local rspec_flaky_folder_path="$(dirname "${FLAKY_RSPEC_SUITE_REPORT_PATH}")/"
+
+ debug_rspec_variables
+
+ mkdir -p ${rspec_flaky_folder_path}
+
+ find ${rspec_flaky_folder_path} -type f -name 'skipped_flaky_tests_*_report.txt' -exec cat {} + >> "${SKIPPED_FLAKY_TESTS_REPORT_PATH}"
+ find ${rspec_flaky_folder_path} -type f -name 'retried_tests_*_report.txt' -exec cat {} + >> "${RETRIED_TESTS_REPORT_PATH}"
+
+ cleanup_individual_job_reports
+}
diff --git a/scripts/setup/find-jh-branch.rb b/scripts/setup/find-jh-branch.rb
index 812e1c210f4..89aa1492939 100755
--- a/scripts/setup/find-jh-branch.rb
+++ b/scripts/setup/find-jh-branch.rb
@@ -8,7 +8,7 @@ require_relative '../api/default_options'
class FindJhBranch
JH_DEFAULT_BRANCH = 'main-jh'
- JH_PROJECT_PATH = 'gitlab-jh/gitlab'
+ JH_PROJECT_PATH = 'gitlab-org/gitlab-jh/gitlab'
BranchNotFound = Class.new(RuntimeError)
def run
diff --git a/scripts/trigger-build b/scripts/trigger-build.rb
index d40e8de5a1f..17cbd91a8ee 100755
--- a/scripts/trigger-build
+++ b/scripts/trigger-build.rb
@@ -21,6 +21,12 @@ module Trigger
variable_value
end
+ def self.variables_for_env_file(variables)
+ variables.map do |key, value|
+ %Q(#{key}=#{value})
+ end.join("\n")
+ end
+
class Base
# Can be overridden
def self.access_token
@@ -57,6 +63,21 @@ module Trigger
end
end
+ def variables
+ simple_forwarded_variables.merge(base_variables, extra_variables, version_file_variables)
+ end
+
+ def simple_forwarded_variables
+ {
+ 'TRIGGER_SOURCE' => ENV['CI_JOB_URL'],
+ 'TOP_UPSTREAM_SOURCE_PROJECT' => ENV['CI_PROJECT_PATH'],
+ 'TOP_UPSTREAM_SOURCE_REF' => ENV['CI_COMMIT_REF_NAME'],
+ 'TOP_UPSTREAM_SOURCE_JOB' => ENV['CI_JOB_URL'],
+ 'TOP_UPSTREAM_MERGE_REQUEST_PROJECT_ID' => ENV['CI_MERGE_REQUEST_PROJECT_ID'],
+ 'TOP_UPSTREAM_MERGE_REQUEST_IID' => ENV['CI_MERGE_REQUEST_IID']
+ }
+ end
+
private
# Override to trigger and work with pipeline on different GitLab instance
@@ -95,23 +116,13 @@ module Trigger
ENV[version_file]&.strip || File.read(version_file).strip
end
- def variables
- base_variables.merge(extra_variables).merge(version_file_variables)
- end
-
def base_variables
# Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA for omnibus checkouts due to pipeline for merged results,
# and fallback to CI_COMMIT_SHA for the `detached` pipelines.
{
'GITLAB_REF_SLUG' => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : ENV['CI_COMMIT_REF_SLUG'],
'TRIGGERED_USER' => ENV['TRIGGERED_USER'] || ENV['GITLAB_USER_NAME'],
- 'TRIGGER_SOURCE' => ENV['CI_JOB_URL'],
- 'TOP_UPSTREAM_SOURCE_PROJECT' => ENV['CI_PROJECT_PATH'],
- 'TOP_UPSTREAM_SOURCE_JOB' => ENV['CI_JOB_URL'],
- 'TOP_UPSTREAM_SOURCE_SHA' => Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA'],
- 'TOP_UPSTREAM_SOURCE_REF' => ENV['CI_COMMIT_REF_NAME'],
- 'TOP_UPSTREAM_MERGE_REQUEST_PROJECT_ID' => ENV['CI_MERGE_REQUEST_PROJECT_ID'],
- 'TOP_UPSTREAM_MERGE_REQUEST_IID' => ENV['CI_MERGE_REQUEST_IID']
+ 'TOP_UPSTREAM_SOURCE_SHA' => Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA']
}
end
@@ -163,17 +174,16 @@ module Trigger
end
class CNG < Base
- def self.access_token
- # Default to "Multi-pipeline (from 'gitlab-org/gitlab' 'cloud-native-image' job)" at https://gitlab.com/gitlab-org/build/CNG/-/settings/access_tokens
- ENV['CNG_PROJECT_ACCESS_TOKEN'] || super
+ def variables
+ # Delete variables that aren't useful when using native triggers.
+ super.tap do |hash|
+ hash.delete('TRIGGER_SOURCE')
+ hash.delete('TRIGGERED_USER')
+ end
end
private
- def downstream_project_path
- ENV.fetch('CNG_PROJECT_PATH', 'gitlab-org/build/CNG')
- end
-
def ref
return ENV['CI_COMMIT_REF_NAME'] if ENV['CI_COMMIT_REF_NAME'] =~ /^[\d-]+-stable(-ee)?$/
@@ -181,17 +191,17 @@ module Trigger
end
def extra_variables
- edition = Trigger.ee? ? 'EE' : 'CE'
# Use CI_MERGE_REQUEST_SOURCE_BRANCH_SHA (MR HEAD commit) so that the image is in sync with the assets and QA images.
source_sha = Trigger.non_empty_variable_value('CI_MERGE_REQUEST_SOURCE_BRANCH_SHA') || ENV['CI_COMMIT_SHA']
{
- "ee" => Trigger.ee? ? "true" : "false",
+ "TRIGGER_BRANCH" => ref,
"GITLAB_VERSION" => source_sha,
- "GITLAB_TAG" => ENV['CI_COMMIT_TAG'],
+ "GITLAB_TAG" => ENV['CI_COMMIT_TAG'], # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
"GITLAB_ASSETS_TAG" => ENV['CI_COMMIT_TAG'] ? ENV['CI_COMMIT_REF_NAME'] : source_sha,
"FORCE_RAILS_IMAGE_BUILDS" => 'true',
- "#{edition}_PIPELINE" => 'true'
+ "CE_PIPELINE" => Trigger.ee? ? nil : "true", # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
+ "EE_PIPELINE" => Trigger.ee? ? "true" : nil # Always set a value, even an empty string, so that the downstream pipeline can correctly check it.
}
end
@@ -445,28 +455,30 @@ module Trigger
Job = Class.new(Pipeline)
end
-case ARGV[0]
-when 'omnibus'
- Trigger::Omnibus.new.invoke!(post_comment: true, downstream_job_name: 'Trigger:qa-test').wait!
-when 'cng'
- Trigger::CNG.new.invoke!.wait!
-when 'gitlab-com-database-testing'
- Trigger::DatabaseTesting.new.invoke!
-when 'docs'
- docs_trigger = Trigger::Docs.new
-
- case ARGV[1]
- when 'deploy'
- docs_trigger.deploy!
- when 'cleanup'
- docs_trigger.cleanup!
+if $0 == __FILE__
+ case ARGV[0]
+ when 'omnibus'
+ Trigger::Omnibus.new.invoke!(post_comment: true, downstream_job_name: 'Trigger:qa-test').wait!
+ when 'cng'
+ Trigger::CNG.new.invoke!.wait!
+ when 'gitlab-com-database-testing'
+ Trigger::DatabaseTesting.new.invoke!
+ when 'docs'
+ docs_trigger = Trigger::Docs.new
+
+ case ARGV[1]
+ when 'deploy'
+ docs_trigger.deploy!
+ when 'cleanup'
+ docs_trigger.cleanup!
+ else
+ puts 'usage: trigger-build docs <deploy|cleanup>'
+ exit 1
+ end
else
- puts 'usage: trigger-build docs <deploy|cleanup>'
- exit 1
+ puts "Please provide a valid option:
+ omnibus - Triggers a pipeline that builds the omnibus-gitlab package
+ cng - Triggers a pipeline that builds images used by the GitLab helm chart
+ gitlab-com-database-testing - Triggers a pipeline that tests database changes on GitLab.com data"
end
-else
- puts "Please provide a valid option:
- omnibus - Triggers a pipeline that builds the omnibus-gitlab package
- cng - Triggers a pipeline that builds images used by the GitLab helm chart
- gitlab-com-database-testing - Triggers a pipeline that tests database changes on GitLab.com data"
end
diff --git a/scripts/utils.sh b/scripts/utils.sh
index 15047d35fc3..c20508617b8 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -36,6 +36,7 @@ function bundle_install_script() {
exit 1;
fi;
+ gem install bundler --no-document --conservative --version 2.3.6
bundle --version
bundle config set path "$(pwd)/vendor"
bundle config set clean 'true'
@@ -64,16 +65,20 @@ function setup_db() {
}
function install_api_client_dependencies_with_apk() {
- apk add --update openssl curl jq
+ run_timed_command "apk add --update openssl curl jq"
}
function install_gitlab_gem() {
- gem install httparty --no-document --version 0.18.1
- gem install gitlab --no-document --version 4.17.0
+ run_timed_command "gem install httparty --no-document --version 0.18.1"
+ run_timed_command "gem install gitlab --no-document --version 4.17.0"
}
function install_tff_gem() {
- gem install test_file_finder --version 0.1.1
+ run_timed_command "gem install test_file_finder --no-document --version 0.1.1"
+}
+
+function install_junit_merge_gem() {
+ run_timed_command "gem install junit_merge --no-document --version 0.1.2"
}
function run_timed_command() {