From ee664acb356f8123f4f6b00b73c1e1cf0866c7fb Mon Sep 17 00:00:00 2001 From: GitLab Bot Date: Thu, 20 Oct 2022 09:40:42 +0000 Subject: Add latest changes from gitlab-org/gitlab@15-5-stable-ee --- lib/tasks/gitlab/assets.rake | 103 +++++++++++++++------------- lib/tasks/gitlab/backup.rake | 128 ++++++++++++++++++++++++++++------- lib/tasks/gitlab/db.rake | 44 +++++++++--- lib/tasks/gitlab/db/lock_writes.rake | 3 +- lib/tasks/gitlab/openapi.rake | 23 +++++++ lib/tasks/gitlab/seed.rake | 35 ++++++++++ lib/tasks/gitlab/snippets.rake | 2 +- lib/tasks/gitlab/tw/codeowners.rake | 19 +++--- lib/tasks/gitlab/usage_data.rake | 45 +++++++----- 9 files changed, 292 insertions(+), 110 deletions(-) create mode 100644 lib/tasks/gitlab/openapi.rake (limited to 'lib/tasks') diff --git a/lib/tasks/gitlab/assets.rake b/lib/tasks/gitlab/assets.rake index 76ee5379213..b58d9473794 100644 --- a/lib/tasks/gitlab/assets.rake +++ b/lib/tasks/gitlab/assets.rake @@ -5,28 +5,52 @@ require 'fileutils' module Tasks module Gitlab module Assets - FOSS_ASSET_FOLDERS = %w[app/assets fixtures/emojis vendor/assets/javascripts].freeze + FOSS_ASSET_FOLDERS = %w[app/assets fixtures/emojis vendor/assets].freeze EE_ASSET_FOLDERS = %w[ee/app/assets].freeze JH_ASSET_FOLDERS = %w[jh/app/assets].freeze - JS_ASSET_PATTERNS = %w[*.js config/**/*.js].freeze - JS_ASSET_FILES = %w[package.json yarn.lock].freeze - MASTER_SHA256_HASH_FILE = 'master-assets-hash.txt' - HEAD_SHA256_HASH_FILE = 'assets-hash.txt' - PUBLIC_ASSETS_WEBPACK_DIR = 'public/assets/webpack' + # In the new caching strategy, we check the assets hash sum *before* compiling + # the app/assets/javascripts/locale/**/app.js files. That means the hash sum + # must depend on locale/**/gitlab.po. + JS_ASSET_PATTERNS = %w[*.js config/**/*.js locale/**/gitlab.po].freeze + JS_ASSET_FILES = %w[ + package.json + yarn.lock + babel.config.js + config/webpack.config.js + ].freeze + EXCLUDE_PATTERNS = %w[ + app/assets/javascripts/locale/**/app.js + ].freeze + PUBLIC_ASSETS_DIR = 'public/assets' + HEAD_ASSETS_SHA256_HASH_ENV = 'GITLAB_ASSETS_HASH' + CACHED_ASSETS_SHA256_HASH_FILE = 'cached-assets-hash.txt' + + def self.master_assets_sha256 + @master_assets_sha256 ||= + if File.exist?(Tasks::Gitlab::Assets::CACHED_ASSETS_SHA256_HASH_FILE) + File.read(Tasks::Gitlab::Assets::CACHED_ASSETS_SHA256_HASH_FILE) + else + 'missing!' + end + end + + def self.head_assets_sha256 + @head_assets_sha256 ||= ENV.fetch(Tasks::Gitlab::Assets::HEAD_ASSETS_SHA256_HASH_ENV) do + Tasks::Gitlab::Assets.sha256_of_assets_impacting_compilation(verbose: false) + end + end - def self.sha256_of_assets_impacting_webpack_compilation + def self.sha256_of_assets_impacting_compilation(verbose: true) start_time = Time.now - asset_files = assets_impacting_webpack_compilation - puts "Generating the SHA256 hash for #{assets_impacting_webpack_compilation.size} Webpack-related assets..." + asset_files = assets_impacting_compilation + puts "Generating the SHA256 hash for #{asset_files.size} Webpack-related assets..." 
if verbose - asset_file_sha256s = asset_files.map do |asset_file| - Digest::SHA256.file(asset_file).hexdigest - end + assets_sha256 = asset_files.map { |asset_file| Digest::SHA256.file(asset_file).hexdigest }.join - Digest::SHA256.hexdigest(asset_file_sha256s.join).tap { |sha256| puts "=> SHA256 generated in #{Time.now - start_time}: #{sha256}" } + Digest::SHA256.hexdigest(assets_sha256).tap { |sha256| puts "=> SHA256 generated in #{Time.now - start_time}: #{sha256}" if verbose } end - def self.assets_impacting_webpack_compilation + def self.assets_impacting_compilation assets_folders = FOSS_ASSET_FOLDERS assets_folders += EE_ASSET_FOLDERS if ::Gitlab.ee? assets_folders += JH_ASSET_FOLDERS if ::Gitlab.jh? @@ -38,52 +62,34 @@ module Tasks asset_files.concat(Dir.glob(["#{folder}/**/*.*"])) end - asset_files + asset_files - Dir.glob(EXCLUDE_PATTERNS) end - - private_class_method :assets_impacting_webpack_compilation + private_class_method :assets_impacting_compilation end end end namespace :gitlab do namespace :assets do + desc 'GitLab | Assets | Return the hash sum of all frontend assets' + task :hash_sum do + print Tasks::Gitlab::Assets.sha256_of_assets_impacting_compilation(verbose: false) + end + desc 'GitLab | Assets | Compile all frontend assets' task :compile do require_dependency 'gitlab/task_helpers' - %w[ - yarn:check - gettext:po_to_json - rake:assets:precompile - gitlab:assets:compile_webpack_if_needed - gitlab:assets:fix_urls - gitlab:assets:check_page_bundle_mixins_css_for_sideeffects - ].each(&::Gitlab::TaskHelpers.method(:invoke_and_time_task)) - end - - desc 'GitLab | Assets | Compile all Webpack assets' - task :compile_webpack_if_needed do - FileUtils.mv(Tasks::Gitlab::Assets::HEAD_SHA256_HASH_FILE, Tasks::Gitlab::Assets::MASTER_SHA256_HASH_FILE, force: true) - - master_assets_sha256 = - if File.exist?(Tasks::Gitlab::Assets::MASTER_SHA256_HASH_FILE) - File.read(Tasks::Gitlab::Assets::MASTER_SHA256_HASH_FILE) - else - 'missing!' 
- end + puts "Assets SHA256 for `master`: #{Tasks::Gitlab::Assets.master_assets_sha256.inspect}" + puts "Assets SHA256 for `HEAD`: #{Tasks::Gitlab::Assets.head_assets_sha256.inspect}" - head_assets_sha256 = Tasks::Gitlab::Assets.sha256_of_assets_impacting_webpack_compilation.tap do |sha256| - File.write(Tasks::Gitlab::Assets::HEAD_SHA256_HASH_FILE, sha256) - end - - puts "Webpack assets SHA256 for `master`: #{master_assets_sha256}" - puts "Webpack assets SHA256 for `HEAD`: #{head_assets_sha256}" + if Tasks::Gitlab::Assets.head_assets_sha256 != Tasks::Gitlab::Assets.master_assets_sha256 + FileUtils.rm_r(Tasks::Gitlab::Assets::PUBLIC_ASSETS_DIR) if Dir.exist?(Tasks::Gitlab::Assets::PUBLIC_ASSETS_DIR) - public_assets_webpack_dir_exists = Dir.exist?(Tasks::Gitlab::Assets::PUBLIC_ASSETS_WEBPACK_DIR) - - if head_assets_sha256 != master_assets_sha256 || !public_assets_webpack_dir_exists - FileUtils.rm_r(Tasks::Gitlab::Assets::PUBLIC_ASSETS_WEBPACK_DIR) if public_assets_webpack_dir_exists + # gettext:po_to_json needs to run before rake:assets:precompile because + # app/assets/javascripts/locale/**/app.js are pre-compiled by Sprockets + Gitlab::TaskHelpers.invoke_and_time_task('gettext:po_to_json') + Gitlab::TaskHelpers.invoke_and_time_task('rake:assets:precompile') log_path = ENV['WEBPACK_COMPILE_LOG_PATH'] @@ -96,6 +102,9 @@ namespace :gitlab do puts "Written webpack stdout log to #{log_path}" if log_path puts "You can inspect the webpack log here: #{ENV['CI_JOB_URL']}/artifacts/file/#{log_path}" if log_path && ENV['CI_JOB_URL'] + + Gitlab::TaskHelpers.invoke_and_time_task('gitlab:assets:fix_urls') + Gitlab::TaskHelpers.invoke_and_time_task('gitlab:assets:check_page_bundle_mixins_css_for_sideeffects') end end diff --git a/lib/tasks/gitlab/backup.rake b/lib/tasks/gitlab/backup.rake index ff43a36d930..6647a10898f 100644 --- a/lib/tasks/gitlab/backup.rake +++ b/lib/tasks/gitlab/backup.rake @@ -4,121 +4,168 @@ require 'active_record/fixtures' namespace :gitlab do namespace :backup do + PID = Process.pid.freeze + PID_FILE = "#{Rails.application.root}/tmp/backup_restore.pid" + # Create backup of GitLab system desc 'GitLab | Backup | Create a backup of the GitLab system' task create: :gitlab_environment do - warn_user_is_not_gitlab + lock do + warn_user_is_not_gitlab - Backup::Manager.new(progress).create + Backup::Manager.new(progress).create + end end # Restore backup of GitLab system desc 'GitLab | Backup | Restore a previously created backup' task restore: :gitlab_environment do - warn_user_is_not_gitlab + lock do + warn_user_is_not_gitlab - Backup::Manager.new(progress).restore + Backup::Manager.new(progress).restore + end end namespace :repo do task create: :gitlab_environment do - Backup::Manager.new(progress).run_create_task('repositories') + lock do + Backup::Manager.new(progress).run_create_task('repositories') + end end task restore: :gitlab_environment do - Backup::Manager.new(progress).run_restore_task('repositories') + lock do + Backup::Manager.new(progress).run_restore_task('repositories') + end end end namespace :db do task create: :gitlab_environment do - Backup::Manager.new(progress).run_create_task('main_db') - Backup::Manager.new(progress).run_create_task('ci_db') + lock do + Backup::Manager.new(progress).run_create_task('main_db') + Backup::Manager.new(progress).run_create_task('ci_db') + end end task restore: :gitlab_environment do - Backup::Manager.new(progress).run_restore_task('main_db') - Backup::Manager.new(progress).run_restore_task('ci_db') + lock do + 
Backup::Manager.new(progress).run_restore_task('main_db') + Backup::Manager.new(progress).run_restore_task('ci_db') + end end end namespace :builds do task create: :gitlab_environment do - Backup::Manager.new(progress).run_create_task('builds') + lock do + Backup::Manager.new(progress).run_create_task('builds') + end end task restore: :gitlab_environment do - Backup::Manager.new(progress).run_restore_task('builds') + lock do + Backup::Manager.new(progress).run_restore_task('builds') + end end end namespace :uploads do task create: :gitlab_environment do - Backup::Manager.new(progress).run_create_task('uploads') + lock do + Backup::Manager.new(progress).run_create_task('uploads') + end end task restore: :gitlab_environment do - Backup::Manager.new(progress).run_restore_task('uploads') + lock do + Backup::Manager.new(progress).run_restore_task('uploads') + end end end namespace :artifacts do task create: :gitlab_environment do - Backup::Manager.new(progress).run_create_task('artifacts') + lock do + Backup::Manager.new(progress).run_create_task('artifacts') + end end task restore: :gitlab_environment do - Backup::Manager.new(progress).run_restore_task('artifacts') + lock do + Backup::Manager.new(progress).run_restore_task('artifacts') + end end end namespace :pages do task create: :gitlab_environment do - Backup::Manager.new(progress).run_create_task('pages') + lock do + Backup::Manager.new(progress).run_create_task('pages') + end end task restore: :gitlab_environment do - Backup::Manager.new(progress).run_restore_task('pages') + lock do + Backup::Manager.new(progress).run_restore_task('pages') + end end end namespace :lfs do task create: :gitlab_environment do - Backup::Manager.new(progress).run_create_task('lfs') + lock do + Backup::Manager.new(progress).run_create_task('lfs') + end end task restore: :gitlab_environment do - Backup::Manager.new(progress).run_restore_task('lfs') + lock do + Backup::Manager.new(progress).run_restore_task('lfs') + end end end namespace :terraform_state do task create: :gitlab_environment do - Backup::Manager.new(progress).run_create_task('terraform_state') + lock do + Backup::Manager.new(progress).run_create_task('terraform_state') + end end task restore: :gitlab_environment do - Backup::Manager.new(progress).run_restore_task('terraform_state') + lock do + Backup::Manager.new(progress).run_restore_task('terraform_state') + end end end namespace :registry do task create: :gitlab_environment do - Backup::Manager.new(progress).run_create_task('registry') + lock do + Backup::Manager.new(progress).run_create_task('registry') + end end task restore: :gitlab_environment do - Backup::Manager.new(progress).run_restore_task('registry') + lock do + Backup::Manager.new(progress).run_restore_task('registry') + end end end namespace :packages do task create: :gitlab_environment do - Backup::Manager.new(progress).run_create_task('packages') + lock do + Backup::Manager.new(progress).run_create_task('packages') + end end task restore: :gitlab_environment do - Backup::Manager.new(progress).run_restore_task('packages') + lock do + Backup::Manager.new(progress).run_restore_task('packages') + end end end @@ -132,6 +179,35 @@ namespace :gitlab do $stdout end end + + def lock + File.open(PID_FILE, File::RDWR | File::CREAT, 0644) do |f| + f.flock(File::LOCK_EX) + + unless f.read.empty? + # There is a PID inside so the process fails + progress.puts(<<~HEREDOC.color(:red)) + Backup and restore in progress: + There is a backup and restore task in progress. 
Please, try to run the current task once the previous one ends. + If there is no other process running, please remove the PID file manually: rm #{PID_FILE} + HEREDOC + + exit 1 + end + + f.write(PID) + f.flush + ensure + f.flock(File::LOCK_UN) + end + + begin + yield + ensure + progress.puts "#{Time.now} " + "-- Deleting backup and restore lock file".color(:blue) + File.delete(PID_FILE) + end + end end # namespace end: backup end diff --git a/lib/tasks/gitlab/db.rake b/lib/tasks/gitlab/db.rake index 30e0e3e72ff..4ef0c396f4a 100644 --- a/lib/tasks/gitlab/db.rake +++ b/lib/tasks/gitlab/db.rake @@ -304,14 +304,30 @@ namespace :gitlab do end namespace :migration_testing do - desc 'Run migrations with instrumentation' + # Not possible to import Gitlab::Database::DATABASE_NAMES here + # Specs verify that a task exists for each entry in that array. + all_databases = %i[main ci] + task up: :environment do - Gitlab::Database::Migrations::Runner.up.run + Gitlab::Database::Migrations::Runner.up(database: 'main', legacy_mode: true).run + end + + namespace :up do + all_databases.each do |db| + desc "Run migrations on #{db} with instrumentation" + task db => :environment do + Gitlab::Database::Migrations::Runner.up(database: db).run + end + end end - desc 'Run down migrations in current branch with instrumentation' - task down: :environment do - Gitlab::Database::Migrations::Runner.down.run + namespace :down do + all_databases.each do |db| + desc "Run down migrations on #{db} in current branch with instrumentation" + task db => :environment do + Gitlab::Database::Migrations::Runner.down(database: db).run + end + end end desc 'Sample traditional background migrations with instrumentation' @@ -321,12 +337,24 @@ namespace :gitlab do Gitlab::Database::Migrations::Runner.background_migrations.run_jobs(for_duration: duration) end - desc 'Sample batched background migrations with instrumentation' + namespace :sample_batched_background_migrations do + all_databases.each do |db| + desc "Sample batched background migrations on #{db} with instrumentation" + task db, [:duration_s] => [:environment] do |_t, args| + duration = args[:duration_s]&.to_i&.seconds || 30.minutes # Default of 30 minutes + + Gitlab::Database::Migrations::Runner.batched_background_migrations(for_database: db) + .run_jobs(for_duration: duration) + end + end + end + + desc "Sample batched background migrations with instrumentation (legacy)" task :sample_batched_background_migrations, [:database, :duration_s] => [:environment] do |_t, args| - database_name = args[:database] || 'main' duration = args[:duration_s]&.to_i&.seconds || 30.minutes # Default of 30 minutes - Gitlab::Database::Migrations::Runner.batched_background_migrations(for_database: database_name) + database = args[:database] || 'main' + Gitlab::Database::Migrations::Runner.batched_background_migrations(for_database: database, legacy_mode: true) .run_jobs(for_duration: duration) end end diff --git a/lib/tasks/gitlab/db/lock_writes.rake b/lib/tasks/gitlab/db/lock_writes.rake index eb6d257cac5..421c6a90fdd 100644 --- a/lib/tasks/gitlab/db/lock_writes.rake +++ b/lib/tasks/gitlab/db/lock_writes.rake @@ -14,7 +14,8 @@ namespace :gitlab do table_name: table_name, connection: connection, database_name: database_name, - logger: Logger.new($stdout) + logger: Logger.new($stdout), + dry_run: ENV['DRY_RUN'] == 'true' ) if schemas_for_connection.include?(schema_name.to_sym) diff --git a/lib/tasks/gitlab/openapi.rake b/lib/tasks/gitlab/openapi.rake new file mode 100644 index 
00000000000..fd067a1bf0b --- /dev/null +++ b/lib/tasks/gitlab/openapi.rake @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +require 'logger' + +if Rails.env.development? + require 'grape-swagger/rake/oapi_tasks' + GrapeSwagger::Rake::OapiTasks.new('::API::API') +end + +namespace :gitlab do + namespace :openapi do + task :generate do + raise 'This task can only be run in the development environment' unless Rails.env.development? + + ENV['store'] = 'tmp/openapi.json' + Rake::Task["oapi:fetch"].invoke(['openapi.json']) + + yaml_content = Gitlab::Json.parse(File.read('tmp/openapi_swagger_doc.json')).to_yaml + + File.write("doc/api/openapi/openapi_v2.yaml", yaml_content) + end + end +end diff --git a/lib/tasks/gitlab/seed.rake b/lib/tasks/gitlab/seed.rake index 36761165af5..7b9c57b1876 100644 --- a/lib/tasks/gitlab/seed.rake +++ b/lib/tasks/gitlab/seed.rake @@ -35,5 +35,40 @@ namespace :gitlab do puts "\n#{issues_created} issues created!" end end + + task :epics, [:group_full_path, :backfill_weeks, :average_issues_per_week] => :environment do |t, args| + args.with_defaults(backfill_weeks: 5, average_issues_per_week: 2) + + groups = + if args.group_full_path + group = Group.find_by_full_path(args.group_full_path) + + unless group + error_message = "Group '#{args.group_full_path}' does not exist!" + potential_groups = Group.search(args.group_full_path) + + if potential_groups.present? + error_message += " Did you mean '#{potential_groups.first.full_path}'?" + end + + puts error_message.color(:red) + exit 1 + end + + [group] + else + Group.not_mass_generated.find_each + end + + groups.each do |group| + puts "\nSeeding epics for the '#{group.full_path}' group" + seeder = Quality::Seeders::Epics.new(group: group) + epics = seeder.seed( + backfill_weeks: args.backfill_weeks.to_i, + average_issues_per_week: args.average_issues_per_week.to_i + ) + puts "\n#{epics} epics created!" + end + end end end diff --git a/lib/tasks/gitlab/snippets.rake b/lib/tasks/gitlab/snippets.rake index b55f82480e1..d7f71468102 100644 --- a/lib/tasks/gitlab/snippets.rake +++ b/lib/tasks/gitlab/snippets.rake @@ -60,7 +60,7 @@ namespace :gitlab do end def migration_running? - Sidekiq::ScheduledSet.new.any? { |r| r.klass == 'BackgroundMigrationWorker' && r.args[0] == 'BackfillSnippetRepositories' } + Sidekiq::ScheduledSet.new.any? { |r| r.klass == 'BackgroundMigrationWorker' && r.args[0] == 'BackfillSnippetRepositories' } # rubocop:disable Cop/SidekiqApiUsage end # @example diff --git a/lib/tasks/gitlab/tw/codeowners.rake b/lib/tasks/gitlab/tw/codeowners.rake index 148801254bf..fd9c7114979 100644 --- a/lib/tasks/gitlab/tw/codeowners.rake +++ b/lib/tasks/gitlab/tw/codeowners.rake @@ -7,10 +7,10 @@ namespace :tw do task :codeowners do CodeOwnerRule = Struct.new(:category, :writer) DocumentOwnerMapping = Struct.new(:path, :writer) do - def writer_owns_all_pages?(mappings) - mappings - .select { |mapping| mapping.directory == directory } - .all? 
{ |mapping| mapping.writer == writer } + def writer_owns_directory?(mappings) + dir_mappings = mappings.select { |mapping| mapping.directory == directory } + + dir_mappings.count { |mapping| mapping.writer == writer } / dir_mappings.length.to_f > 0.5 end def directory @@ -22,13 +22,12 @@ namespace :tw do CodeOwnerRule.new('Activation', '@phillipwells'), CodeOwnerRule.new('Acquisition', '@phillipwells'), CodeOwnerRule.new('Anti-Abuse', '@phillipwells'), - CodeOwnerRule.new('Authentication and Authorization', '@eread'), + CodeOwnerRule.new('Authentication and Authorization', '@jglassman1'), CodeOwnerRule.new('Certify', '@msedlakjakubowski'), CodeOwnerRule.new('Code Review', '@aqualls'), CodeOwnerRule.new('Compliance', '@eread'), CodeOwnerRule.new('Composition Analysis', '@rdickenson'), CodeOwnerRule.new('Configure', '@phillipwells'), - CodeOwnerRule.new('Container Security', '@claytoncornell'), CodeOwnerRule.new('Contributor Experience', '@eread'), CodeOwnerRule.new('Conversion', '@kpaizee'), CodeOwnerRule.new('Database', '@aqualls'), @@ -39,7 +38,7 @@ namespace :tw do CodeOwnerRule.new('Documentation Guidelines', '@sselhorn'), CodeOwnerRule.new('Dynamic Analysis', '@rdickenson'), CodeOwnerRule.new('Ecosystem', '@kpaizee'), - CodeOwnerRule.new('Editor', '@aqualls'), + CodeOwnerRule.new('Editor', '@ashrafkhamis'), CodeOwnerRule.new('Foundations', '@rdickenson'), CodeOwnerRule.new('Fuzz Testing', '@rdickenson'), CodeOwnerRule.new('Geo', '@axil'), @@ -58,6 +57,7 @@ namespace :tw do CodeOwnerRule.new('Pipeline Execution', '@marcel.amirault'), CodeOwnerRule.new('Pipeline Insights', '@marcel.amirault'), CodeOwnerRule.new('Portfolio Management', '@msedlakjakubowski'), + CodeOwnerRule.new('Product Analytics', '@lciutacu'), CodeOwnerRule.new('Product Intelligence', '@claytoncornell'), CodeOwnerRule.new('Product Planning', '@msedlakjakubowski'), CodeOwnerRule.new('Project Management', '@msedlakjakubowski'), @@ -68,6 +68,7 @@ namespace :tw do CodeOwnerRule.new('Respond', '@msedlakjakubowski'), CodeOwnerRule.new('Runner', '@sselhorn'), CodeOwnerRule.new('Pods', '@sselhorn'), + CodeOwnerRule.new('Security Policies', '@claytoncornell'), CodeOwnerRule.new('Source Code', '@aqualls'), CodeOwnerRule.new('Static Analysis', '@rdickenson'), CodeOwnerRule.new('Style Guide', '@sselhorn'), @@ -114,14 +115,14 @@ namespace :tw do deduplicated_mappings = Set.new mappings.each do |mapping| - if mapping.writer_owns_all_pages?(mappings) + if mapping.writer_owns_directory?(mappings) deduplicated_mappings.add("#{mapping.directory}/ #{mapping.writer}") else deduplicated_mappings.add("#{mapping.path} #{mapping.writer}") end end - deduplicated_mappings.each { |mapping| puts mapping } + deduplicated_mappings.sort.each { |mapping| puts mapping } if errors.present? puts "-----" diff --git a/lib/tasks/gitlab/usage_data.rake b/lib/tasks/gitlab/usage_data.rake index 73a79427da3..159b70cd673 100644 --- a/lib/tasks/gitlab/usage_data.rake +++ b/lib/tasks/gitlab/usage_data.rake @@ -43,17 +43,10 @@ namespace :gitlab do # Do not edit it manually! 
BANNER - repository_includes = ci_template_includes_hash(:repository_source) - auto_devops_jobs_includes = ci_template_includes_hash(:auto_devops_source, 'Jobs') - auto_devops_security_includes = ci_template_includes_hash(:auto_devops_source, 'Security') - all_includes = [ - *repository_includes, - ci_template_event('p_ci_templates_implicit_auto_devops'), - *auto_devops_jobs_includes, - *auto_devops_security_includes - ] - - File.write(Gitlab::UsageDataCounters::CiTemplateUniqueCounter::KNOWN_EVENTS_FILE_PATH, banner + YAML.dump(all_includes).gsub(/ *$/m, '')) + all_includes = explicit_template_includes + implicit_auto_devops_includes + yaml = banner + YAML.dump(all_includes).gsub(/ *$/m, '') + + File.write(Gitlab::UsageDataCounters::CiTemplateUniqueCounter::KNOWN_EVENTS_FILE_PATH, yaml) end desc 'GitLab | UsageDataMetrics | Generate raw SQL metrics queries for RSpec' @@ -65,16 +58,27 @@ namespace :gitlab do end FileUtils.mkdir_p(path) - FileUtils.chdir(path) - File.write('sql_metrics_queries.json', Gitlab::Json.pretty_generate(queries)) + File.write(File.join(path, 'sql_metrics_queries.json'), Gitlab::Json.pretty_generate(queries)) + end + + # Events for templates included via YAML-less Auto-DevOps + def implicit_auto_devops_includes + Gitlab::UsageDataCounters::CiTemplateUniqueCounter + .all_included_templates('Auto-DevOps.gitlab-ci.yml') + .map { |template| implicit_auto_devops_event(template) } + .uniq + .sort_by { _1['name'] } end - def ci_template_includes_hash(source, template_directory = nil) - Gitlab::UsageDataCounters::CiTemplateUniqueCounter.ci_templates("lib/gitlab/ci/templates/#{template_directory}").map do |template| - expanded_template_name = Gitlab::UsageDataCounters::CiTemplateUniqueCounter.expand_template_name("#{template_directory}/#{template}") - event_name = Gitlab::UsageDataCounters::CiTemplateUniqueCounter.ci_template_event_name(expanded_template_name, source) + # Events for templates included in a .gitlab-ci.yml using include:template + def explicit_template_includes + Gitlab::UsageDataCounters::CiTemplateUniqueCounter.ci_templates("lib/gitlab/ci/templates/").each_with_object([]) do |template, result| + expanded_template_name = Gitlab::UsageDataCounters::CiTemplateUniqueCounter.expand_template_name(template) + next unless expanded_template_name # guard against templates unavailable on FOSS - ci_template_event(event_name) + event_name = Gitlab::UsageDataCounters::CiTemplateUniqueCounter.ci_template_event_name(expanded_template_name, :repository_source) + + result << ci_template_event(event_name) end end @@ -86,5 +90,10 @@ namespace :gitlab do 'aggregation' => 'weekly' } end + + def implicit_auto_devops_event(expanded_template_name) + event_name = Gitlab::UsageDataCounters::CiTemplateUniqueCounter.ci_template_event_name(expanded_template_name, :auto_devops_source) + ci_template_event(event_name) + end end end -- cgit v1.2.3
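
For reference, the reworked gitlab:assets:compile flow above only recompiles when the SHA256 over compilation-impacting assets differs from the value cached in cached-assets-hash.txt. A minimal standalone sketch of that decision, assuming a simplified glob (the real task also applies EXCLUDE_PATTERNS, the EE/JH asset folders, and the GITLAB_ASSETS_HASH override):

require 'digest'
require 'fileutils'

CACHED_HASH_FILE = 'cached-assets-hash.txt'

# Hash every asset file individually, then hash the concatenation of those digests.
asset_files = Dir.glob(['*.js', 'config/**/*.js', 'app/assets/**/*.*'])
head_sha256 = Digest::SHA256.hexdigest(
  asset_files.map { |file| Digest::SHA256.file(file).hexdigest }.join
)

cached_sha256 = File.exist?(CACHED_HASH_FILE) ? File.read(CACHED_HASH_FILE) : 'missing!'

if head_sha256 != cached_sha256
  # Stale or missing cache: drop public/assets and recompile from scratch.
  FileUtils.rm_r('public/assets') if Dir.exist?('public/assets')
  # gettext:po_to_json must run before assets:precompile, because Sprockets
  # pre-compiles app/assets/javascripts/locale/**/app.js.
end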
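
The gitlab:backup tasks now serialise through a PID file guarded by flock, so overlapping create/restore runs fail fast instead of interleaving. A minimal sketch of that pattern outside Rake, using the same tmp/backup_restore.pid path; error handling and the colourised progress output are simplified:

PID_FILE = 'tmp/backup_restore.pid'

def with_backup_lock
  File.open(PID_FILE, File::RDWR | File::CREAT, 0644) do |f|
    f.flock(File::LOCK_EX)                 # serialise concurrent invocations

    # A non-empty file means another backup/restore still holds the lock.
    raise 'backup and restore already in progress' unless f.read.empty?

    f.write(Process.pid)                   # record the lock owner
    f.flush
  ensure
    f.flock(File::LOCK_UN)
  end

  begin
    yield
  ensure
    File.delete(PID_FILE)                  # free the lock for the next run
  end
end

with_backup_lock { puts 'running backup...' }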
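
The tw:codeowners change replaces the old all-pages rule with a majority rule: a directory entry is emitted when a writer owns more than half of the pages under it. A small sketch of that check with hypothetical mapping data (the Mapping struct and paths below are illustrative only):

# Illustrative majority check mirroring writer_owns_directory? in codeowners.rake.
Mapping = Struct.new(:directory, :writer)

mappings = [
  Mapping.new('doc/user/project', '@aqualls'),
  Mapping.new('doc/user/project', '@aqualls'),
  Mapping.new('doc/user/project', '@eread')
]

def writer_owns_directory?(writer, directory, mappings)
  dir_mappings = mappings.select { |m| m.directory == directory }

  dir_mappings.count { |m| m.writer == writer } / dir_mappings.length.to_f > 0.5
end

writer_owns_directory?('@aqualls', 'doc/user/project', mappings) # => true  (2 of 3)
writer_owns_directory?('@eread', 'doc/user/project', mappings)   # => false (1 of 3)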