Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summary refs log tree commit diff
diff options
context:
space:
mode:
author GitLab Bot <gitlab-bot@gitlab.com> 2022-03-18 23:02:30 +0300
committer GitLab Bot <gitlab-bot@gitlab.com> 2022-03-18 23:02:30 +0300
commit 41fe97390ceddf945f3d967b8fdb3de4c66b7dea (patch)
tree 9c8d89a8624828992f06d892cd2f43818ff5dcc8 /scripts
parent 0804d2dc31052fb45a1efecedc8e06ce9bc32862 (diff)
Add latest changes from gitlab-org/gitlab@14-9-stable-ee (tag: v14.9.0-rc42)
Diffstat (limited to 'scripts')
-rwxr-xr-x scripts/generate-memory-metrics-on-boot      | 29
-rw-r--r-- scripts/gitlab_workhorse_component_helpers.sh | 73
-rwxr-xr-x scripts/ingest-reports-to-siem               | 45
-rwxr-xr-x scripts/lint_templates_bash.rb               | 76
-rwxr-xr-x scripts/merge-simplecov                      | 24
-rwxr-xr-x scripts/setup/find-jh-branch.rb              |  2
-rwxr-xr-x scripts/trigger-build.rb                     |  4
-rw-r--r-- scripts/utils.sh                             | 16
8 files changed, 229 insertions, 40 deletions
diff --git a/scripts/generate-memory-metrics-on-boot b/scripts/generate-memory-metrics-on-boot
index 945661aa057..539446f7c0c 100755
--- a/scripts/generate-memory-metrics-on-boot
+++ b/scripts/generate-memory-metrics-on-boot
@@ -1,12 +1,29 @@
#!/usr/bin/env ruby
# frozen_string_literal: true
-abort "usage: #{__FILE__} <memory_bundle_mem_file_name>" unless ARGV.length == 1
-memory_bundle_mem_file_name = ARGV.first
+abort "usage: #{__FILE__} <memory_bundle_mem_file_name_prefix> <test_count>" unless ARGV.length == 2
+memory_bundle_mem_file_name_prefix = ARGV.first
+test_count = ARGV.last.to_i
-full_report = File.open(memory_bundle_mem_file_name).read
+results = []
+(1..test_count).each do |i|
+ report_filename = "#{memory_bundle_mem_file_name_prefix}#{i}.txt"
-stats = /TOP: (?<total_mibs_str>.*) MiB/.match(full_report)
-abort 'failed to process the benchmark output' unless stats
+ stats = nil
+ File.foreach(report_filename).detect do |line|
+ stats = /TOP: (?<total_mibs_str>.*) MiB/.match(line)
+ end
+ abort 'failed to process the benchmark output' unless stats
-puts "total_memory_used_by_dependencies_on_boot_prod_env_mb #{stats[:total_mibs_str].to_f.round(1)}"
+ total_mibs = stats[:total_mibs_str].to_f
+ results << total_mibs
+end
+
+res = results.sort
+median = (res[(test_count - 1) / 2] + res[test_count / 2]) / 2.0
+
+METRIC_NAME = "total_memory_used_by_dependencies_on_boot_prod_env_mb"
+
+puts "# TYPE #{METRIC_NAME} gauge"
+puts "# UNIT #{METRIC_NAME} mebibytes"
+puts "#{METRIC_NAME} #{median.round(1)}"
diff --git a/scripts/gitlab_workhorse_component_helpers.sh b/scripts/gitlab_workhorse_component_helpers.sh
new file mode 100644
index 00000000000..06fe7b2ea51
--- /dev/null
+++ b/scripts/gitlab_workhorse_component_helpers.sh
@@ -0,0 +1,73 @@
+#!/usr/bin/env bash
+
+set -euo pipefail
+
+export CURL_TOKEN_HEADER="${CURL_TOKEN_HEADER:-"JOB-TOKEN"}"
+export GITLAB_WORKHORSE_BINARIES_LIST="gitlab-resize-image gitlab-zip-cat gitlab-zip-metadata gitlab-workhorse"
+export GITLAB_WORKHORSE_PACKAGE_FILES_LIST="${GITLAB_WORKHORSE_BINARIES_LIST} WORKHORSE_TREE"
+export GITLAB_WORKHORSE_TREE=${GITLAB_WORKHORSE_TREE:-$(git rev-parse HEAD:workhorse)}
+export GITLAB_WORKHORSE_PACKAGE="workhorse-${GITLAB_WORKHORSE_TREE}.tar.gz"
+export GITLAB_WORKHORSE_PACKAGE_URL="${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/${GITLAB_WORKHORSE_FOLDER}/${GITLAB_WORKHORSE_TREE}/${GITLAB_WORKHORSE_PACKAGE}"
+
+function gitlab_workhorse_archive_doesnt_exist() {
+ local package_url="${GITLAB_WORKHORSE_PACKAGE_URL}"
+
+ status=$(curl -I --silent --retry 3 --output /dev/null -w "%{http_code}" "${package_url}")
+
+ [[ "${status}" != "200" ]]
+}
+
+function create_gitlab_workhorse_package() {
+ local archive_filename="${GITLAB_WORKHORSE_PACKAGE}"
+ local folder_to_archive="${GITLAB_WORKHORSE_FOLDER}"
+ local workhorse_folder_path="${TMP_TEST_GITLAB_WORKHORSE_PATH}"
+ local tar_working_folder="${TMP_TEST_FOLDER}"
+
+ echoinfo "Running 'tar -czvf ${archive_filename} -C ${tar_working_folder} ${folder_to_archive}'"
+ tar -czvf ${archive_filename} -C ${tar_working_folder} ${folder_to_archive}
+ du -h ${archive_filename}
+}
+
+function extract_gitlab_workhorse_package() {
+ local tar_working_folder="${TMP_TEST_FOLDER}"
+
+ echoinfo "Extracting archive to ${tar_working_folder}"
+
+ tar -xzv -C ${tar_working_folder} < /dev/stdin
+}
+
+function upload_gitlab_workhorse_package() {
+ local archive_filename="${GITLAB_WORKHORSE_PACKAGE}"
+ local package_url="${GITLAB_WORKHORSE_PACKAGE_URL}"
+ local token_header="${CURL_TOKEN_HEADER}"
+ local token="${CI_JOB_TOKEN}"
+
+ echoinfo "Uploading ${archive_filename} to ${package_url} ..."
+ curl --fail --silent --retry 3 --header "${token_header}: ${token}" --upload-file "${archive_filename}" "${package_url}"
+}
+
+function read_curl_gitlab_workhorse_package() {
+ local package_url="${GITLAB_WORKHORSE_PACKAGE_URL}"
+ local token_header="${CURL_TOKEN_HEADER}"
+ local token="${CI_JOB_TOKEN}"
+
+ echoinfo "Downloading from ${package_url} ..."
+
+ curl --fail --silent --retry 3 --header "${token_header}: ${token}" "${package_url}"
+}
+
+function download_and_extract_gitlab_workhorse_package() {
+ read_curl_gitlab_workhorse_package | extract_gitlab_workhorse_package
+}
+
+function select_gitlab_workhorse_essentials() {
+ local tmp_path="${CI_PROJECT_DIR}/tmp/${GITLAB_WORKHORSE_FOLDER}"
+ local original_gitlab_workhorse_path="${TMP_TEST_GITLAB_WORKHORSE_PATH}"
+
+ mkdir -p ${tmp_path}
+ cd ${original_gitlab_workhorse_path} && mv ${GITLAB_WORKHORSE_PACKAGE_FILES_LIST} ${tmp_path} && cd -
+ rm -rf ${original_gitlab_workhorse_path}
+
+ # Move the temp folder to its final destination
+ mv ${tmp_path} ${TMP_TEST_FOLDER}
+}
diff --git a/scripts/ingest-reports-to-siem b/scripts/ingest-reports-to-siem
new file mode 100755
index 00000000000..86c72e1d7eb
--- /dev/null
+++ b/scripts/ingest-reports-to-siem
@@ -0,0 +1,45 @@
+#!/usr/bin/env node
+
+const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3')
+const { fromIni } = require('@aws-sdk/credential-provider-ini')
+const path = require('path')
+const fs = require('fs')
+const crypto = require('crypto')
+
+function getMD5HashFromFile(data) {
+ const hash = crypto.createHash('md5').update(data).digest('base64')
+ return hash
+}
+
+(async function () {
+ const s3Client = new S3Client({
+ region: 'us-east-2',
+ credentials: fromIni({ profile: 'gl-logs-for-panther' }),
+ })
+ try {
+ const file = 'gl-dependency-scanning-report.json'
+ const data = fs.readFileSync(file)
+
+ const [filename, fileext] = path.basename(file).split('.')
+ const uniqueId = process.env['CI_PIPELINE_ID'] && process.env['CI_JOB_ID'] ?
+ process.env['CI_PIPELINE_ID'] + '-' + process.env['CI_JOB_ID'] :
+ Date.now()
+ const key = path.join('package_hunter_test', filename + '-' + uniqueId + '.' + fileext)
+
+ const responseData = await s3Client.send(
+ new PutObjectCommand({
+ Bucket: 'gl-logs-for-panther-test',
+ Key: key,
+ Body: data,
+ ContentMD5: getMD5HashFromFile(data),
+ }),
+ )
+ console.log('Successfully uploaded %s to %s', file, key)
+ } catch (err) {
+ if (err.name === 'CredentialsProviderError' || err.name === 'AuthorizationHeaderMalformed')
+ console.log('Could not upload the report. Are AWS credentials configured in ~/.aws/credentials?')
+ else
+ console.log('Unexpected error during upload: ', err.message)
+ process.exit(1)
+ }
+})()
diff --git a/scripts/lint_templates_bash.rb b/scripts/lint_templates_bash.rb
new file mode 100755
index 00000000000..8db9469ecdf
--- /dev/null
+++ b/scripts/lint_templates_bash.rb
@@ -0,0 +1,76 @@
+#!/usr/bin/env ruby
+# frozen_string_literal: true
+
+require_relative '../config/environment'
+require 'open3'
+
+module LintTemplatesBash
+ module_function
+
+ EXCLUDED_RULES = [
+ "SC2046", "SC2086", # will be fixed later: https://gitlab.com/gitlab-org/gitlab/-/issues/352973
+ "SC1090", "SC1091", # we do not have access to sourced files for analysis.
+ "SC2154", # Referencing undefined variables is common and adding per-line exceptions for them is unintuitive for end-users
+ "SC2164" # CI/CD automatically fails if attempting to change to a directory which does not exist.
+ ].join(",").freeze
+
+ EXCLUDED_TEMPLATES = [
+ "dotNET.gitlab-ci.yml" # Powershell
+ ].freeze
+
+ def run
+ failed_templates = Gitlab::Template::GitlabCiYmlTemplate.all.filter_map do |template|
+ next if EXCLUDED_TEMPLATES.include?(template.full_name)
+
+ success = check_template(template)
+
+ template.full_name unless success
+ end
+
+ if failed_templates.any?
+ puts "The following templates have shellcheck violations:"
+ puts failed_templates.join("\n")
+ exit 1
+ end
+ end
+
+ def process_content(content)
+ Gitlab::Ci::YamlProcessor.new(content).execute
+ end
+
+ def job_script(job)
+ parts = [:before_script, :script, :after_script].map do |key|
+ job[key]&.join("\n")
+ end.compact
+
+ parts.prepend("#!/bin/bash\n").join("\n")
+ end
+
+ def shellcheck(script_content)
+ combined_streams, status = Open3.capture2e("shellcheck --exclude='#{EXCLUDED_RULES}' -", stdin_data: script_content)
+
+ [combined_streams, status.success?]
+ end
+
+ def check_job(job)
+ shellcheck(job_script(job))
+ end
+
+ def check_template(template)
+ parsed = process_content(template.content)
+ results = parsed.jobs.map do |name, job|
+ out, success = check_job(job)
+
+ unless success
+ puts "The '#{name}' job in #{template.full_name} has shellcheck failures:"
+ puts out
+ end
+
+ success
+ end
+
+ results.all?
+ end
+end
+
+LintTemplatesBash.run
diff --git a/scripts/merge-simplecov b/scripts/merge-simplecov
index b74a416a6e0..32a0cd86f82 100755
--- a/scripts/merge-simplecov
+++ b/scripts/merge-simplecov
@@ -5,26 +5,4 @@ require_relative '../spec/simplecov_env'
SimpleCovEnv.configure_profile
SimpleCovEnv.configure_formatter
-module SimpleCov
- module ResultMerger
- class << self
- def resultset_files
- Dir.glob(File.join(SimpleCov.coverage_path, '*', '.resultset.json'))
- end
-
- def resultset_hashes
- resultset_files.map do |path|
- JSON.parse(File.read(path))
- rescue StandardError
- {}
- end
- end
-
- def resultset
- resultset_hashes.reduce({}, :merge)
- end
- end
- end
-end
-
-SimpleCov::ResultMerger.merged_result.format!
+SimpleCov.collate Dir.glob(File.join(SimpleCov.coverage_path, '*', '.resultset.json'))
diff --git a/scripts/setup/find-jh-branch.rb b/scripts/setup/find-jh-branch.rb
index 89aa1492939..a7c1cafd74c 100755
--- a/scripts/setup/find-jh-branch.rb
+++ b/scripts/setup/find-jh-branch.rb
@@ -8,7 +8,7 @@ require_relative '../api/default_options'
class FindJhBranch
JH_DEFAULT_BRANCH = 'main-jh'
- JH_PROJECT_PATH = 'gitlab-org/gitlab-jh/gitlab'
+ JH_PROJECT_PATH = 'gitlab-org/gitlab-jh-mirrors/gitlab'
BranchNotFound = Class.new(RuntimeError)
def run
diff --git a/scripts/trigger-build.rb b/scripts/trigger-build.rb
index 17cbd91a8ee..a3356c664d1 100755
--- a/scripts/trigger-build.rb
+++ b/scripts/trigger-build.rb
@@ -324,8 +324,8 @@ module Trigger
def invoke!(post_comment: false, downstream_job_name: nil)
pipeline = super
gitlab = gitlab_client(:upstream)
- project_path = base_variables['TOP_UPSTREAM_SOURCE_PROJECT']
- merge_request_id = base_variables['TOP_UPSTREAM_MERGE_REQUEST_IID']
+ project_path = variables['TOP_UPSTREAM_SOURCE_PROJECT']
+ merge_request_id = variables['TOP_UPSTREAM_MERGE_REQUEST_IID']
comment = "<!-- #{IDENTIFIABLE_NOTE_TAG} --> \nStarted database testing [pipeline](https://ops.gitlab.net/#{downstream_project_path}/-/pipelines/#{pipeline.id}) " \
"(limited access). This comment will be updated once the pipeline has finished running."
diff --git a/scripts/utils.sh b/scripts/utils.sh
index c20508617b8..e896fe40e06 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -83,7 +83,7 @@ function install_junit_merge_gem() {
function run_timed_command() {
local cmd="${1}"
- local metric_name="${2}"
+ local metric_name="${2:-no}"
local timed_metric_file
local start=$(date +%s)
@@ -97,7 +97,7 @@ function run_timed_command() {
if [[ $ret -eq 0 ]]; then
echosuccess "==> '${cmd}' succeeded in ${runtime} seconds."
- if [[ -n "${metric_name}" ]]; then
+ if [[ "${metric_name}" != "no" ]]; then
timed_metric_file=$(timed_metric_file $metric_name)
echo "# TYPE ${metric_name} gauge" > "${timed_metric_file}"
echo "# UNIT ${metric_name} seconds" >> "${timed_metric_file}"
@@ -132,9 +132,9 @@ function timed_metric_file() {
}
function echoerr() {
- local header="${2}"
+ local header="${2:-no}"
- if [ -n "${header}" ]; then
+ if [ "${header}" != "no" ]; then
printf "\n\033[0;31m** %s **\n\033[0m" "${1}" >&2;
else
printf "\033[0;31m%s\n\033[0m" "${1}" >&2;
@@ -142,9 +142,9 @@ function echoerr() {
}
function echoinfo() {
- local header="${2}"
+ local header="${2:-no}"
- if [ -n "${header}" ]; then
+ if [ "${header}" != "no" ]; then
printf "\n\033[0;33m** %s **\n\033[0m" "${1}" >&2;
else
printf "\033[0;33m%s\n\033[0m" "${1}" >&2;
@@ -152,9 +152,9 @@ function echoinfo() {
}
function echosuccess() {
- local header="${2}"
+ local header="${2:-no}"
- if [ -n "${header}" ]; then
+ if [ "${header}" != "no" ]; then
printf "\n\033[0;32m** %s **\n\033[0m" "${1}" >&2;
else
printf "\033[0;32m%s\n\033[0m" "${1}" >&2;