diff options
Diffstat (limited to 'lib/gitlab')
309 files changed, 4091 insertions, 1791 deletions
diff --git a/lib/gitlab/alert_management/payload/base.rb b/lib/gitlab/alert_management/payload/base.rb index 5e535ded439..2d769148c5f 100644 --- a/lib/gitlab/alert_management/payload/base.rb +++ b/lib/gitlab/alert_management/payload/base.rb @@ -102,19 +102,19 @@ module Gitlab # AlertManagement::Alert directly for read operations. def alert_params { - description: description&.truncate(::AlertManagement::Alert::DESCRIPTION_MAX_LENGTH), + description: truncate(description, ::AlertManagement::Alert::DESCRIPTION_MAX_LENGTH), ended_at: ends_at, environment: environment, fingerprint: gitlab_fingerprint, hosts: truncate_hosts(Array(hosts).flatten), - monitoring_tool: monitoring_tool&.truncate(::AlertManagement::Alert::TOOL_MAX_LENGTH), + monitoring_tool: truncate(monitoring_tool, ::AlertManagement::Alert::TOOL_MAX_LENGTH), payload: payload, project_id: project.id, prometheus_alert: gitlab_alert, - service: service&.truncate(::AlertManagement::Alert::SERVICE_MAX_LENGTH), + service: truncate(service, ::AlertManagement::Alert::SERVICE_MAX_LENGTH), severity: severity, started_at: starts_at, - title: title&.truncate(::AlertManagement::Alert::TITLE_MAX_LENGTH) + title: truncate(title, ::AlertManagement::Alert::TITLE_MAX_LENGTH) }.transform_values(&:presence).compact end @@ -161,6 +161,10 @@ module Gitlab SEVERITY_MAPPING end + def truncate(value, length) + value.to_s.truncate(length) + end + def truncate_hosts(hosts) return hosts if hosts.join.length <= ::AlertManagement::Alert::HOSTS_MAX_LENGTH diff --git a/lib/gitlab/application_context.rb b/lib/gitlab/application_context.rb index 3e095585b18..0f0ecd82a32 100644 --- a/lib/gitlab/application_context.rb +++ b/lib/gitlab/application_context.rb @@ -21,6 +21,8 @@ module Gitlab :related_class, :feature_category, :artifact_size, + :artifacts_dependencies_size, + :artifacts_dependencies_count, :root_caller_id ].freeze private_constant :KNOWN_KEYS @@ -36,6 +38,8 @@ module Gitlab Attribute.new(:related_class, String), 
Attribute.new(:feature_category, String), Attribute.new(:artifact, ::Ci::JobArtifact), + Attribute.new(:artifacts_dependencies_size, Integer), + Attribute.new(:artifacts_dependencies_count, Integer), Attribute.new(:root_caller_id, String) ].freeze @@ -82,15 +86,18 @@ module Gitlab # rubocop: disable Metrics/PerceivedComplexity def to_lazy_hash {}.tap do |hash| - hash[:user] = -> { username } if include_user? - hash[:project] = -> { project_path } if include_project? - hash[:root_namespace] = -> { root_namespace_path } if include_namespace? - hash[:client_id] = -> { client } if include_client? assign_hash_if_value(hash, :caller_id) assign_hash_if_value(hash, :root_caller_id) assign_hash_if_value(hash, :remote_ip) assign_hash_if_value(hash, :related_class) assign_hash_if_value(hash, :feature_category) + assign_hash_if_value(hash, :artifacts_dependencies_size) + assign_hash_if_value(hash, :artifacts_dependencies_count) + + hash[:user] = -> { username } if include_user? + hash[:project] = -> { project_path } if include_project? + hash[:root_namespace] = -> { root_namespace_path } if include_namespace? + hash[:client_id] = -> { client } if include_client? 
hash[:pipeline_id] = -> { job&.pipeline_id } if set_values.include?(:job) hash[:job_id] = -> { job&.id } if set_values.include?(:job) hash[:artifact_size] = -> { artifact&.size } if set_values.include?(:artifact) @@ -112,7 +119,9 @@ module Gitlab end def assign_hash_if_value(hash, attribute_name) - raise ArgumentError unless KNOWN_KEYS.include?(attribute_name) + unless KNOWN_KEYS.include?(attribute_name) + raise ArgumentError, "unknown attribute `#{attribute_name}`" + end # rubocop:disable GitlabSecurity/PublicSend hash[attribute_name] = public_send(attribute_name) if set_values.include?(attribute_name) diff --git a/lib/gitlab/application_rate_limiter.rb b/lib/gitlab/application_rate_limiter.rb index 0c52ce8aba4..a2d79b189a3 100644 --- a/lib/gitlab/application_rate_limiter.rb +++ b/lib/gitlab/application_rate_limiter.rb @@ -65,6 +65,8 @@ module Gitlab # per user (scope)) # @param threshold [Integer] Optional threshold value to override default # one registered in `.rate_limits` + # @param interval [Integer] Optional interval value to override default + # one registered in `.rate_limits` # @param users_allowlist [Array<String>] Optional list of usernames to # exclude from the limit. This param will only be functional if Scope # includes a current user. @@ -72,7 +74,7 @@ module Gitlab # incremented but the current throttled state will be returned. # # @return [Boolean] Whether or not a request should be throttled - def throttled?(key, scope:, resource: nil, threshold: nil, users_allowlist: nil, peek: false) + def throttled?(key, scope:, resource: nil, threshold: nil, interval: nil, users_allowlist: nil, peek: false) raise InvalidKeyError unless rate_limits[key] strategy = resource.present? ? 
IncrementPerActionedResource.new(resource.id) : IncrementPerAction.new @@ -85,7 +87,7 @@ module Gitlab return false if threshold_value == 0 - interval_value = interval(key) + interval_value = interval || interval(key) return false if interval_value == 0 @@ -112,11 +114,12 @@ module Gitlab # @param key [Symbol] Key attribute registered in `.rate_limits` # @param scope [Array<ActiveRecord>] Array of ActiveRecord models to scope throttling to a specific request (e.g. per user per project) # @param threshold [Integer] Optional threshold value to override default one registered in `.rate_limits` + # @param interval [Integer] Optional interval value to override default one registered in `.rate_limits` # @param users_allowlist [Array<String>] Optional list of usernames to exclude from the limit. This param will only be functional if Scope includes a current user. # # @return [Boolean] Whether or not a request is currently throttled - def peek(key, scope:, threshold: nil, users_allowlist: nil) - throttled?(key, peek: true, scope: scope, threshold: threshold, users_allowlist: users_allowlist) + def peek(key, scope:, threshold: nil, interval: nil, users_allowlist: nil) + throttled?(key, peek: true, scope: scope, threshold: threshold, interval: interval, users_allowlist: users_allowlist) end # Logs request using provided logger diff --git a/lib/gitlab/audit/auditor.rb b/lib/gitlab/audit/auditor.rb new file mode 100644 index 00000000000..c96be19f02d --- /dev/null +++ b/lib/gitlab/audit/auditor.rb @@ -0,0 +1,175 @@ +# frozen_string_literal: true + +module Gitlab + module Audit + class Auditor + attr_reader :scope, :name + + # Record audit events + # + # @param [Hash] context + # @option context [String] :name the operation name to be audited, used for error tracking + # @option context [User] :author the user who authors the change + # @option context [User, Project, Group] :scope the scope which audit event belongs to + # @option context [Object] :target the target object 
being audited + # @option context [String] :message the message describing the action + # @option context [Hash] :additional_details the additional details we want to merge into audit event details. + # @option context [Time] :created_at the time that the event occurred (defaults to the current time) + # + # @example Using block (useful when events are emitted deep in the call stack) + # i.e. multiple audit events + # + # audit_context = { + # name: 'merge_approval_rule_updated', + # author: current_user, + # scope: project_alpha, + # target: merge_approval_rule, + # message: 'a user has attempted to update an approval rule' + # } + # + # # in the initiating service + # Gitlab::Audit::Auditor.audit(audit_context) do + # service.execute + # end + # + # # in the model + # Auditable.push_audit_event('an approver has been added') + # Auditable.push_audit_event('an approval group has been removed') + # + # @example Using standard method call + # i.e. single audit event + # + # merge_approval_rule.save + # Gitlab::Audit::Auditor.audit(audit_context) + # + # @return result of block execution + def self.audit(context, &block) + auditor = new(context) + + return unless auditor.audit_enabled? 
+ + if block + auditor.multiple_audit(&block) + else + auditor.single_audit + end + end + + def initialize(context = {}) + @context = context + + @name = @context.fetch(:name, 'audit_operation') + @stream_only = @context.fetch(:stream_only, false) + @author = @context.fetch(:author) + @scope = @context.fetch(:scope) + @target = @context.fetch(:target) + @created_at = @context.fetch(:created_at, DateTime.current) + @message = @context.fetch(:message, '') + @additional_details = @context.fetch(:additional_details, {}) + @ip_address = @context[:ip_address] + @target_details = @context[:target_details] + @authentication_event = @context.fetch(:authentication_event, false) + @authentication_provider = @context[:authentication_provider] + end + + def single_audit + events = [build_event(@message)] + + record(events) + end + + def multiple_audit + # For now we dont have any need to implement multiple audit event functionality in CE + # Defined in EE + end + + def record(events) + log_events(events) unless @stream_only + send_to_stream(events) + end + + def log_events(events) + log_authentication_event + log_to_database(events) + log_to_file(events) + end + + def audit_enabled? + authentication_event? + end + + def authentication_event? + @authentication_event + end + + def log_authentication_event + return unless Gitlab::Database.read_write? && authentication_event? + + event = AuthenticationEvent.new(authentication_event_payload) + event.save! + rescue ActiveRecord::RecordInvalid => e + ::Gitlab::ErrorTracking.track_exception(e, audit_operation: @name) + end + + def authentication_event_payload + { + # @author can be a User or various Gitlab::Audit authors. + # Only capture real users for successful authentication events. 
+ user: author_if_user, + user_name: @author.name, + ip_address: @ip_address, + result: AuthenticationEvent.results[:success], + provider: @authentication_provider + } + end + + def author_if_user + @author if @author.is_a?(User) + end + + def send_to_stream(events) + # Defined in EE + end + + def build_event(message) + AuditEvents::BuildService.new( + author: @author, + scope: @scope, + target: @target, + created_at: @created_at, + message: message, + additional_details: @additional_details, + ip_address: @ip_address, + target_details: @target_details + ).execute + end + + def log_to_database(events) + AuditEvent.bulk_insert!(events) + rescue ActiveRecord::RecordInvalid => e + ::Gitlab::ErrorTracking.track_exception(e, audit_operation: @name) + end + + def log_to_file(events) + file_logger = ::Gitlab::AuditJsonLogger.build + + events.each { |event| file_logger.info(log_payload(event)) } + end + + private + + def log_payload(event) + payload = event.as_json + details = formatted_details(event.details) + payload["details"] = details + payload.merge!(details).as_json + end + + def formatted_details(details) + details.merge(details.slice(:from, :to).transform_values(&:to_s)) + end + end + end +end + +Gitlab::Audit::Auditor.prepend_mod_with("Gitlab::Audit::Auditor") diff --git a/lib/gitlab/audit/deploy_key_author.rb b/lib/gitlab/audit/deploy_key_author.rb new file mode 100644 index 00000000000..53029e9cc1c --- /dev/null +++ b/lib/gitlab/audit/deploy_key_author.rb @@ -0,0 +1,15 @@ +# frozen_string_literal: true + +module Gitlab + module Audit + class DeployKeyAuthor < Gitlab::Audit::NullAuthor + def initialize(name: nil) + super(id: -3, name: name) + end + + def name + @name || _('Deploy Key') + end + end + end +end diff --git a/lib/gitlab/audit/null_author.rb b/lib/gitlab/audit/null_author.rb index 08be6ae6d9f..cb0dfe45aef 100644 --- a/lib/gitlab/audit/null_author.rb +++ b/lib/gitlab/audit/null_author.rb @@ -24,6 +24,8 @@ module Gitlab 
Gitlab::Audit::UnauthenticatedAuthor.new(name: name) elsif id == -2 Gitlab::Audit::DeployTokenAuthor.new(name: name) + elsif id == -3 + Gitlab::Audit::DeployKeyAuthor.new(name: name) else Gitlab::Audit::DeletedAuthor.new(id: id, name: name) end diff --git a/lib/gitlab/audit/null_target.rb b/lib/gitlab/audit/null_target.rb new file mode 100644 index 00000000000..ed3a50e9067 --- /dev/null +++ b/lib/gitlab/audit/null_target.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module Gitlab + module Audit + class NullTarget + def id + nil + end + + def type + nil + end + + def details + nil + end + end + end +end diff --git a/lib/gitlab/audit/target.rb b/lib/gitlab/audit/target.rb new file mode 100644 index 00000000000..b9cb54aece8 --- /dev/null +++ b/lib/gitlab/audit/target.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +module Gitlab + module Audit + class Target + delegate :id, to: :@object + + def initialize(object) + @object = object + end + + def type + @object.class.name + end + + def details + @object.try(:name) || @object.try(:audit_details) || 'unknown' + end + end + end +end diff --git a/lib/gitlab/auth.rb b/lib/gitlab/auth.rb index 6c3487c28ea..6213dd203c4 100644 --- a/lib/gitlab/auth.rb +++ b/lib/gitlab/auth.rb @@ -92,7 +92,7 @@ module Gitlab return unless authenticate_using_internal_or_ldap_password? Gitlab::Auth::UniqueIpsLimiter.limit_user! do - user = User.by_login(login) + user = User.find_by_login(login) break if user && !user.can_log_in_with_non_expired_password? 
@@ -279,7 +279,7 @@ module Gitlab if deploy_key_matches DeployKey.find(deploy_key_matches[1]) else - User.by_login(login) + User.find_by_login(login) end return unless actor diff --git a/lib/gitlab/auth/auth_finders.rb b/lib/gitlab/auth/auth_finders.rb index 7adaaef86e4..c994f179b66 100644 --- a/lib/gitlab/auth/auth_finders.rb +++ b/lib/gitlab/auth/auth_finders.rb @@ -103,7 +103,7 @@ module Gitlab return unless has_basic_credentials?(current_request) login, token = user_name_and_password(current_request) - user = User.by_login(login) + user = User.find_by_login(login) user if user && Gitlab::LfsToken.new(user).token_valid?(token) end diff --git a/lib/gitlab/auth/ip_rate_limiter.rb b/lib/gitlab/auth/ip_rate_limiter.rb index f301a2ec2e8..0d50420b9f5 100644 --- a/lib/gitlab/auth/ip_rate_limiter.rb +++ b/lib/gitlab/auth/ip_rate_limiter.rb @@ -33,6 +33,10 @@ module Gitlab Rack::Attack::Allow2Ban.banned?(ip) end + def trusted_ip? + trusted_ips.any? { |netmask| netmask.include?(ip) } + end + private def skip_rate_limit? @@ -47,10 +51,6 @@ module Gitlab Gitlab.config.rack_attack.git_basic_auth end - def trusted_ip? - trusted_ips.any? { |netmask| netmask.include?(ip) } - end - def trusted_ips strong_memoize(:trusted_ips) do config.ip_whitelist.map do |proxy| diff --git a/lib/gitlab/auth/o_auth/auth_hash.rb b/lib/gitlab/auth/o_auth/auth_hash.rb index a45778159c7..37f92792d2d 100644 --- a/lib/gitlab/auth/o_auth/auth_hash.rb +++ b/lib/gitlab/auth/o_auth/auth_hash.rb @@ -59,14 +59,43 @@ module Gitlab auth_hash['info'] end - def get_info(key) - value = info[key] + def coerce_utf8(value) value.is_a?(String) ? 
Gitlab::Utils.force_utf8(value) : value end + def get_info(key) + coerce_utf8(info[key]) + end + + def provider_config + Gitlab::Auth::OAuth::Provider.config_for(@provider) || {} + end + + def provider_args + @provider_args ||= provider_config['args'].presence || {} + end + + def get_from_auth_hash_or_info(key) + coerce_utf8(auth_hash[key]) || get_info(key) + end + + # Allow for configuring a custom username claim per provider from + # the auth hash or use the canonical username or nickname fields + def gitlab_username_claim + provider_args.dig('gitlab_username_claim')&.to_sym + end + + def username_claims + [gitlab_username_claim, :username, :nickname].compact + end + + def get_username + username_claims.map { |claim| get_from_auth_hash_or_info(claim) }.find { |name| name.presence } + end + def username_and_email @username_and_email ||= begin - username = get_info(:username).presence || get_info(:nickname).presence + username = get_username email = get_info(:email).presence username ||= generate_username(email) if email diff --git a/lib/gitlab/background_migration/backfill_ci_namespace_mirrors.rb b/lib/gitlab/background_migration/backfill_ci_namespace_mirrors.rb deleted file mode 100644 index 2247747ba08..00000000000 --- a/lib/gitlab/background_migration/backfill_ci_namespace_mirrors.rb +++ /dev/null @@ -1,77 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # A job to create ci_namespace_mirrors entries in batches - class BackfillCiNamespaceMirrors - class Namespace < ActiveRecord::Base # rubocop:disable Style/Documentation - include ::EachBatch - - self.table_name = 'namespaces' - self.inheritance_column = nil - - scope :base_query, -> do - select(:id, :parent_id) - end - end - - PAUSE_SECONDS = 0.1 - SUB_BATCH_SIZE = 500 - - def perform(start_id, end_id) - batch_query = Namespace.base_query.where(id: start_id..end_id) - batch_query.each_batch(of: SUB_BATCH_SIZE) do |sub_batch| - first, last = 
sub_batch.pluck(Arel.sql('MIN(id), MAX(id)')).first - ranged_query = Namespace.unscoped.base_query.where(id: first..last) - - update_sql = <<~SQL - INSERT INTO ci_namespace_mirrors (namespace_id, traversal_ids) - #{insert_values(ranged_query)} - ON CONFLICT (namespace_id) DO NOTHING - SQL - # We do nothing on conflict because we consider they were already filled. - - Namespace.connection.execute(update_sql) - - sleep PAUSE_SECONDS - end - - mark_job_as_succeeded(start_id, end_id) - end - - private - - def insert_values(batch) - calculated_traversal_ids( - batch.allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/336433') - ) - end - - # Copied from lib/gitlab/background_migration/backfill_namespace_traversal_ids_children.rb - def calculated_traversal_ids(batch) - <<~SQL - WITH RECURSIVE cte(source_id, namespace_id, parent_id, height) AS ( - ( - SELECT batch.id, batch.id, batch.parent_id, 1 - FROM (#{batch.to_sql}) AS batch - ) - UNION ALL - ( - SELECT cte.source_id, n.id, n.parent_id, cte.height+1 - FROM namespaces n, cte - WHERE n.id = cte.parent_id - ) - ) - SELECT flat_hierarchy.source_id as namespace_id, - array_agg(flat_hierarchy.namespace_id ORDER BY flat_hierarchy.height DESC) as traversal_ids - FROM (SELECT * FROM cte FOR UPDATE) flat_hierarchy - GROUP BY flat_hierarchy.source_id - SQL - end - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded('BackfillCiNamespaceMirrors', arguments) - end - end - end -end diff --git a/lib/gitlab/background_migration/backfill_ci_project_mirrors.rb b/lib/gitlab/background_migration/backfill_ci_project_mirrors.rb deleted file mode 100644 index ff6ab9928b0..00000000000 --- a/lib/gitlab/background_migration/backfill_ci_project_mirrors.rb +++ /dev/null @@ -1,52 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # A job to create ci_project_mirrors entries in batches - class BackfillCiProjectMirrors - class 
Project < ActiveRecord::Base # rubocop:disable Style/Documentation - include ::EachBatch - - self.table_name = 'projects' - - scope :base_query, -> do - select(:id, :namespace_id) - end - end - - PAUSE_SECONDS = 0.1 - SUB_BATCH_SIZE = 500 - - def perform(start_id, end_id) - batch_query = Project.base_query.where(id: start_id..end_id) - batch_query.each_batch(of: SUB_BATCH_SIZE) do |sub_batch| - first, last = sub_batch.pluck(Arel.sql('MIN(id), MAX(id)')).first - ranged_query = Project.unscoped.base_query.where(id: first..last) - - update_sql = <<~SQL - INSERT INTO ci_project_mirrors (project_id, namespace_id) - #{insert_values(ranged_query)} - ON CONFLICT (project_id) DO NOTHING - SQL - # We do nothing on conflict because we consider they were already filled. - - Project.connection.execute(update_sql) - - sleep PAUSE_SECONDS - end - - mark_job_as_succeeded(start_id, end_id) - end - - private - - def insert_values(batch) - batch.allow_cross_joins_across_databases(url: 'https://gitlab.com/gitlab-org/gitlab/-/issues/336433').to_sql - end - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded('BackfillCiProjectMirrors', arguments) - end - end - end -end diff --git a/lib/gitlab/background_migration/backfill_ci_runner_semver.rb b/lib/gitlab/background_migration/backfill_ci_runner_semver.rb deleted file mode 100644 index 0901649f789..00000000000 --- a/lib/gitlab/background_migration/backfill_ci_runner_semver.rb +++ /dev/null @@ -1,31 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module BackgroundMigration - # A job to update semver column in ci_runners in batches based on existing version values - class BackfillCiRunnerSemver < Gitlab::BackgroundMigration::BatchedMigrationJob - def perform - each_sub_batch( - operation_name: :backfill_ci_runner_semver, - batching_scope: ->(relation) { relation.where('semver::cidr IS NULL') } - ) do |sub_batch| - ranged_query = sub_batch.select( - %q(id AS r_id, - 
substring(ci_runners.version FROM 'v?(\d+\.\d+\.\d+)') AS extracted_semver) - ) - - update_sql = <<~SQL - UPDATE - ci_runners - SET semver = extracted_semver - FROM (#{ranged_query.to_sql}) v - WHERE id = v.r_id - AND v.extracted_semver IS NOT NULL - SQL - - connection.execute(update_sql) - end - end - end - end -end diff --git a/lib/gitlab/background_migration/backfill_group_features.rb b/lib/gitlab/background_migration/backfill_group_features.rb index 4c3af7be319..35b5282360f 100644 --- a/lib/gitlab/background_migration/backfill_group_features.rb +++ b/lib/gitlab/background_migration/backfill_group_features.rb @@ -4,19 +4,21 @@ module Gitlab module BackgroundMigration # Backfill group_features for an array of groups class BackfillGroupFeatures < ::Gitlab::BackgroundMigration::BatchedMigrationJob - def perform(batch_size) + job_arguments :batch_size + + def perform each_sub_batch( operation_name: :upsert_group_features, batching_arguments: { order_hint: :type }, batching_scope: ->(relation) { relation.where(type: 'Group') } ) do |sub_batch| - upsert_group_features(sub_batch, batch_size) + upsert_group_features(sub_batch) end end private - def upsert_group_features(relation, batch_size) + def upsert_group_features(relation) connection.execute( <<~SQL INSERT INTO group_features (group_id, created_at, updated_at) diff --git a/lib/gitlab/background_migration/backfill_integrations_type_new.rb b/lib/gitlab/background_migration/backfill_integrations_type_new.rb index 6f33472af7d..b07d9371c19 100644 --- a/lib/gitlab/background_migration/backfill_integrations_type_new.rb +++ b/lib/gitlab/background_migration/backfill_integrations_type_new.rb @@ -27,7 +27,7 @@ module Gitlab def process_sub_batch(sub_batch) # Extract the start/stop IDs from the current sub-batch - sub_start_id, sub_stop_id = sub_batch.pluck(Arel.sql('MIN(id), MAX(id)')).first + sub_start_id, sub_stop_id = sub_batch.pick(Arel.sql('MIN(id), MAX(id)')) # This matches the mapping from the INSERT trigger added in 
# db/migrate/20210721135638_add_triggers_to_integrations_type_new.rb diff --git a/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads.rb b/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads.rb new file mode 100644 index 00000000000..cd349bf3ae1 --- /dev/null +++ b/lib/gitlab/background_migration/backfill_namespace_id_of_vulnerability_reads.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module Gitlab + module BackgroundMigration + # Sets the `namespace_id` of the existing `vulnerability_reads` records + class BackfillNamespaceIdOfVulnerabilityReads < BatchedMigrationJob + UPDATE_SQL = <<~SQL + UPDATE + vulnerability_reads + SET + namespace_id = sub_query.namespace_id + FROM + (%<subquery>s) as sub_query + WHERE + vulnerability_reads.vulnerability_id = sub_query.vulnerability_id + SQL + + def perform + each_sub_batch(operation_name: :set_namespace_id) do |sub_batch| + update_query = update_query_for(sub_batch) + + connection.execute(update_query) + end + end + + private + + def update_query_for(sub_batch) + subquery = sub_batch.select("vulnerability_reads.vulnerability_id, projects.namespace_id") + .joins("INNER JOIN projects ON projects.id = vulnerability_reads.project_id") + + format(UPDATE_SQL, subquery: subquery.to_sql) + end + end + end +end diff --git a/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children.rb b/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children.rb index 587de1bcb5a..3b8a452b855 100644 --- a/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children.rb +++ b/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children.rb @@ -19,7 +19,7 @@ module Gitlab def perform(start_id, end_id, sub_batch_size) batch_query = Namespace.base_query.where(id: start_id..end_id) batch_query.each_batch(of: sub_batch_size) do |sub_batch| - first, last = sub_batch.pluck(Arel.sql('min(id), max(id)')).first + first, last = 
sub_batch.pick(Arel.sql('min(id), max(id)')) ranged_query = Namespace.unscoped.base_query.where(id: first..last) update_sql = <<~SQL diff --git a/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots.rb b/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots.rb index 1c0a83285a6..c69289fb91f 100644 --- a/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots.rb +++ b/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots.rb @@ -22,7 +22,7 @@ module Gitlab .where("traversal_ids = '{}'") ranged_query.each_batch(of: sub_batch_size) do |sub_batch| - first, last = sub_batch.pluck(Arel.sql('min(id), max(id)')).first + first, last = sub_batch.pick(Arel.sql('min(id), max(id)')) # The query need to be reconstructed because .each_batch modifies the default scope # See: https://gitlab.com/gitlab-org/gitlab/-/issues/330510 diff --git a/lib/gitlab/background_migration/backfill_project_import_level.rb b/lib/gitlab/background_migration/backfill_project_import_level.rb new file mode 100644 index 00000000000..06706b729ea --- /dev/null +++ b/lib/gitlab/background_migration/backfill_project_import_level.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true +# rubocop:disable Style/Documentation +module Gitlab + module BackgroundMigration + class BackfillProjectImportLevel < BatchedMigrationJob + LEVEL = { + Gitlab::Access::NO_ACCESS => [0], + Gitlab::Access::DEVELOPER => [2], + Gitlab::Access::MAINTAINER => [1], + Gitlab::Access::OWNER => [nil] + }.freeze + + def perform + each_sub_batch(operation_name: :update_import_level) do |sub_batch| + update_import_level(sub_batch) + end + end + + private + + def update_import_level(relation) + LEVEL.each do |import_level, creation_level| + namespace_ids = relation + .where(type: 'Group', project_creation_level: creation_level) + + NamespaceSetting.where( + namespace_id: namespace_ids + ).update_all(project_import_level: import_level) + end + end + end + end +end + +# 
rubocop:enable Style/Documentation diff --git a/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent.rb b/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent.rb new file mode 100644 index 00000000000..728b60f7a0e --- /dev/null +++ b/lib/gitlab/background_migration/backfill_vulnerability_reads_cluster_agent.rb @@ -0,0 +1,30 @@ +# frozen_string_literal: true + +module Gitlab + module BackgroundMigration + # Backfills the `vulnerability_reads.casted_cluster_agent_id` column + class BackfillVulnerabilityReadsClusterAgent < Gitlab::BackgroundMigration::BatchedMigrationJob + CLUSTER_AGENTS_JOIN = <<~SQL + INNER JOIN cluster_agents + ON CAST(vulnerability_reads.cluster_agent_id AS bigint) = cluster_agents.id AND + vulnerability_reads.project_id = cluster_agents.project_id + SQL + + RELATION = ->(relation) do + relation + .where(report_type: 7) + end + + def perform + each_sub_batch( + operation_name: :update_all, + batching_scope: RELATION + ) do |sub_batch| + sub_batch + .joins(CLUSTER_AGENTS_JOIN) + .update_all('casted_cluster_agent_id = CAST(vulnerability_reads.cluster_agent_id AS bigint)') + end + end + end + end +end diff --git a/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues.rb b/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues.rb index a16efa4222b..32962f2bb89 100644 --- a/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues.rb +++ b/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues.rb @@ -20,7 +20,7 @@ module Gitlab parent_batch_relation = relation_scoped_to_range(batch_table, batch_column, start_id, end_id, base_type) parent_batch_relation.each_batch(column: batch_column, of: sub_batch_size) do |sub_batch| - first, last = sub_batch.pluck(Arel.sql('min(id), max(id)')).first + first, last = sub_batch.pick(Arel.sql('min(id), max(id)')) # The query need to be reconstructed because .each_batch modifies the default scope # See: 
https://gitlab.com/gitlab-org/gitlab/-/issues/330510 diff --git a/lib/gitlab/background_migration/batched_migration_job.rb b/lib/gitlab/background_migration/batched_migration_job.rb index c47b1735ccf..11d15804344 100644 --- a/lib/gitlab/background_migration/batched_migration_job.rb +++ b/lib/gitlab/background_migration/batched_migration_job.rb @@ -3,22 +3,62 @@ module Gitlab module BackgroundMigration # Base class for batched background migrations. Subclasses should implement the `#perform` - # method as the entry point for the job's execution, which will be called with the migration - # arguments (if any). + # method as the entry point for the job's execution. + # + # Job arguments needed must be defined explicitly, + # see https://docs.gitlab.com/ee/development/database/batched_background_migrations.html#job-arguments. class BatchedMigrationJob include Gitlab::Database::DynamicModelHelpers - def initialize(start_id:, end_id:, batch_table:, batch_column:, sub_batch_size:, pause_ms:, connection:) + def initialize( + start_id:, end_id:, batch_table:, batch_column:, sub_batch_size:, pause_ms:, job_arguments: [], connection: + ) + @start_id = start_id @end_id = end_id @batch_table = batch_table @batch_column = batch_column @sub_batch_size = sub_batch_size @pause_ms = pause_ms + @job_arguments = job_arguments @connection = connection end - def perform(*job_arguments) + def self.generic_instance(batch_table:, batch_column:, job_arguments: [], connection:) + new( + batch_table: batch_table, batch_column: batch_column, + job_arguments: job_arguments, connection: connection, + start_id: 0, end_id: 0, sub_batch_size: 0, pause_ms: 0 + ) + end + + def self.job_arguments_count + 0 + end + + def self.job_arguments(*args) + args.each.with_index do |arg, index| + define_method(arg) do + @job_arguments[index] + end + end + + define_singleton_method(:job_arguments_count) do + args.count + end + end + + def self.scope_to(scope) + define_method(:filter_batch) do |relation| + 
instance_exec(relation, &scope) + end + end + + def filter_batch(relation) + relation + end + + def perform raise NotImplementedError, "subclasses of #{self.class.name} must implement #{__method__}" end @@ -33,9 +73,10 @@ module Gitlab def each_sub_batch(operation_name: :default, batching_arguments: {}, batching_scope: nil) all_batching_arguments = { column: batch_column, of: sub_batch_size }.merge(batching_arguments) - parent_relation = parent_batch_relation(batching_scope) + relation = filter_batch(base_relation) + sub_batch_relation = filter_sub_batch(relation, batching_scope) - parent_relation.each_batch(**all_batching_arguments) do |relation| + sub_batch_relation.each_batch(**all_batching_arguments) do |relation| batch_metrics.instrument_operation(operation_name) do yield relation end @@ -45,9 +86,13 @@ module Gitlab end def distinct_each_batch(operation_name: :default, batching_arguments: {}) + if base_relation != filter_batch(base_relation) + raise 'distinct_each_batch can not be used when additional filters are defined with scope_to' + end + all_batching_arguments = { column: batch_column, of: sub_batch_size }.merge(batching_arguments) - parent_batch_relation.distinct_each_batch(**all_batching_arguments) do |relation| + base_relation.distinct_each_batch(**all_batching_arguments) do |relation| batch_metrics.instrument_operation(operation_name) do yield relation end @@ -56,13 +101,15 @@ module Gitlab end end - def parent_batch_relation(batching_scope = nil) - parent_relation = define_batchable_model(batch_table, connection: connection) + def base_relation + define_batchable_model(batch_table, connection: connection) .where(batch_column => start_id..end_id) + end - return parent_relation unless batching_scope + def filter_sub_batch(relation, batching_scope = nil) + return relation unless batching_scope - batching_scope.call(parent_relation) + batching_scope.call(relation) end end end diff --git 
a/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy.rb b/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy.rb index 68be42dc0a0..12fd9ae7161 100644 --- a/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy.rb +++ b/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy.rb @@ -25,7 +25,7 @@ module Gitlab relation = model_class.where(projects_table[:namespace_id].in(hierarchy_cte_sql)).where("#{quoted_column_name} >= ?", batch_min_value) relation.each_batch(of: batch_size, column: column_name) do |batch| # rubocop:disable Lint/UnreachableLoop - next_batch_bounds = batch.pluck(Arel.sql("MIN(#{quoted_column_name}), MAX(#{quoted_column_name})")).first + next_batch_bounds = batch.pick(Arel.sql("MIN(#{quoted_column_name}), MAX(#{quoted_column_name})")) break end diff --git a/lib/gitlab/background_migration/batching_strategies/backfill_vulnerability_reads_cluster_agent_batching_strategy.rb b/lib/gitlab/background_migration/batching_strategies/backfill_vulnerability_reads_cluster_agent_batching_strategy.rb new file mode 100644 index 00000000000..f0d015198dc --- /dev/null +++ b/lib/gitlab/background_migration/batching_strategies/backfill_vulnerability_reads_cluster_agent_batching_strategy.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module Gitlab + module BackgroundMigration + module BatchingStrategies + # Batching class to use for back-filling vulnerability_read's casted_cluster_agent_id from cluster_agent_id. + # Batches will be scoped to records where the report_type belongs to cluster_image_scanning. + # + # If no more batches exist in the table, returns nil. 
+ class BackfillVulnerabilityReadsClusterAgentBatchingStrategy < PrimaryKeyBatchingStrategy + CLUSTER_IMAGE_SCANNING_REPORT_TYPE = 7 + + def apply_additional_filters(relation, job_arguments: [], job_class: nil) + relation.where(report_type: CLUSTER_IMAGE_SCANNING_REPORT_TYPE) + end + end + end + end +end diff --git a/lib/gitlab/background_migration/batching_strategies/loose_index_scan_batching_strategy.rb b/lib/gitlab/background_migration/batching_strategies/loose_index_scan_batching_strategy.rb index 5cad9d2e3c4..fc08d2b0ab6 100644 --- a/lib/gitlab/background_migration/batching_strategies/loose_index_scan_batching_strategy.rb +++ b/lib/gitlab/background_migration/batching_strategies/loose_index_scan_batching_strategy.rb @@ -24,7 +24,7 @@ module Gitlab next_batch_bounds = nil relation.distinct_each_batch(of: batch_size, column: column_name) do |batch| # rubocop:disable Lint/UnreachableLoop - next_batch_bounds = batch.pluck(Arel.sql("MIN(#{quoted_column_name}), MAX(#{quoted_column_name})")).first + next_batch_bounds = batch.pick(Arel.sql("MIN(#{quoted_column_name}), MAX(#{quoted_column_name})")) break end diff --git a/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy.rb b/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy.rb index c2f59bf9c76..1ffa4a052e5 100644 --- a/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy.rb +++ b/lib/gitlab/background_migration/batching_strategies/primary_key_batching_strategy.rb @@ -24,11 +24,19 @@ module Gitlab quoted_column_name = model_class.connection.quote_column_name(column_name) relation = model_class.where("#{quoted_column_name} >= ?", batch_min_value) + + if job_class + relation = filter_batch(relation, + table_name: table_name, column_name: column_name, + job_class: job_class, job_arguments: job_arguments + ) + end + relation = apply_additional_filters(relation, job_arguments: job_arguments, job_class: job_class) next_batch_bounds = 
nil relation.each_batch(of: batch_size, column: column_name) do |batch| # rubocop:disable Lint/UnreachableLoop - next_batch_bounds = batch.pluck(Arel.sql("MIN(#{quoted_column_name}), MAX(#{quoted_column_name})")).first + next_batch_bounds = batch.pick(Arel.sql("MIN(#{quoted_column_name}), MAX(#{quoted_column_name})")) break end @@ -36,13 +44,27 @@ module Gitlab next_batch_bounds end + # Deprecated + # + # Use `scope_to` to define additional filters on the migration job class. + # + # see https://docs.gitlab.com/ee/development/database/batched_background_migrations.html#adding-additional-filters. def apply_additional_filters(relation, job_arguments: [], job_class: nil) - if job_class.respond_to?(:batching_scope) - return job_class.batching_scope(relation, job_arguments: job_arguments) - end - relation end + + private + + def filter_batch(relation, table_name:, column_name:, job_class:, job_arguments: []) + return relation unless job_class.respond_to?(:generic_instance) + + job = job_class.generic_instance( + batch_table: table_name, batch_column: column_name, + job_arguments: job_arguments, connection: connection + ) + + job.filter_batch(relation) + end end end end diff --git a/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects.rb b/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects.rb index cb9b0e88ef4..4da120769a0 100644 --- a/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects.rb +++ b/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects.rb @@ -62,7 +62,7 @@ module Gitlab batch = LfsObjectsProject.where(id: start_id..end_id) batch.each_batch(of: SUB_BATCH_SIZE) do |sub_batch| - first, last = sub_batch.pluck(Arel.sql('min(lfs_objects_projects.id), max(lfs_objects_projects.id)')).first + first, last = sub_batch.pick(Arel.sql('min(lfs_objects_projects.id), max(lfs_objects_projects.id)')) lfs_objects_without_association = LfsObjectsProject diff --git 
a/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans.rb b/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans.rb deleted file mode 100644 index 107ac9b0c3b..00000000000 --- a/lib/gitlab/background_migration/copy_ci_builds_columns_to_security_scans.rb +++ /dev/null @@ -1,44 +0,0 @@ -# frozen_string_literal: true -# rubocop:disable Style/Documentation - -module Gitlab - module BackgroundMigration - class CopyCiBuildsColumnsToSecurityScans - extend ::Gitlab::Utils::Override - - UPDATE_BATCH_SIZE = 500 - - def perform(start_id, stop_id) - (start_id..stop_id).step(UPDATE_BATCH_SIZE).each do |offset| - batch_start = offset - batch_stop = offset + UPDATE_BATCH_SIZE - 1 - - ActiveRecord::Base.connection.execute <<~SQL - UPDATE - security_scans - SET - project_id = ci_builds.project_id, - pipeline_id = ci_builds.commit_id - FROM ci_builds - WHERE ci_builds.type='Ci::Build' - AND ci_builds.id=security_scans.build_id - AND security_scans.id BETWEEN #{Integer(batch_start)} AND #{Integer(batch_stop)} - SQL - end - - mark_job_as_succeeded(start_id, stop_id) - rescue StandardError => error - Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error) - end - - private - - def mark_job_as_succeeded(*arguments) - Gitlab::Database::BackgroundMigrationJob.mark_all_as_succeeded( - 'CopyCiBuildsColumnsToSecurityScans', - arguments - ) - end - end - end -end diff --git a/lib/gitlab/background_migration/copy_column_using_background_migration_job.rb b/lib/gitlab/background_migration/copy_column_using_background_migration_job.rb index 826845935b8..15e54431a44 100644 --- a/lib/gitlab/background_migration/copy_column_using_background_migration_job.rb +++ b/lib/gitlab/background_migration/copy_column_using_background_migration_job.rb @@ -14,7 +14,9 @@ module Gitlab # - The table that is migrated does _not_ need `id` as the primary key # We use the provided primary_key column to perform the update. 
class CopyColumnUsingBackgroundMigrationJob < BatchedMigrationJob - def perform(copy_from, copy_to) + job_arguments :copy_from, :copy_to + + def perform assignment_clauses = build_assignment_clauses(copy_from, copy_to) each_sub_batch(operation_name: :update_all) do |relation| diff --git a/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects.rb b/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects.rb new file mode 100644 index 00000000000..019c3d15b3e --- /dev/null +++ b/lib/gitlab/background_migration/disable_legacy_open_source_license_for_no_issues_no_repo_projects.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module Gitlab + module BackgroundMigration + # Set `project_settings.legacy_open_source_license_available` to false for public projects with no issues & no repo + class DisableLegacyOpenSourceLicenseForNoIssuesNoRepoProjects < ::Gitlab::BackgroundMigration::BatchedMigrationJob + PUBLIC = 20 + + # Migration only version of `project_settings` table + class ProjectSetting < ApplicationRecord + self.table_name = 'project_settings' + end + + def perform + each_sub_batch( + operation_name: :disable_legacy_open_source_license_for_no_issues_no_repo_projects, + batching_scope: ->(relation) { relation.where(visibility_level: PUBLIC) } + ) do |sub_batch| + no_issues_no_repo_projects = + sub_batch + .joins('LEFT OUTER JOIN project_statistics ON project_statistics.project_id = projects.id') + .joins('LEFT OUTER JOIN project_settings ON project_settings.project_id = projects.id') + .joins('LEFT OUTER JOIN issues ON issues.project_id = projects.id') + .where('project_statistics.repository_size' => 0, + 'project_settings.legacy_open_source_license_available' => true) + .group('projects.id') + .having('COUNT(issues.id) = 0') + + ProjectSetting + .where(project_id: no_issues_no_repo_projects) + .update_all(legacy_open_source_license_available: false) + end + end + end + end +end 
diff --git a/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects.rb b/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects.rb new file mode 100644 index 00000000000..3a9049b1f19 --- /dev/null +++ b/lib/gitlab/background_migration/disable_legacy_open_source_license_for_one_member_no_repo_projects.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module Gitlab + module BackgroundMigration + # Set `project_settings.legacy_open_source_license_available` to false for public projects with 1 member and no repo + class DisableLegacyOpenSourceLicenseForOneMemberNoRepoProjects < ::Gitlab::BackgroundMigration::BatchedMigrationJob + PUBLIC = 20 + + # Migration only version of `project_settings` table + class ProjectSetting < ApplicationRecord + self.table_name = 'project_settings' + end + + def perform + each_sub_batch( + operation_name: :disable_legacy_open_source_license_for_one_member_no_repo_projects, + batching_scope: ->(relation) { relation.where(visibility_level: PUBLIC) } + ) do |sub_batch| + one_member_no_repo_projects = + sub_batch + .joins('LEFT OUTER JOIN project_statistics ON project_statistics.project_id = projects.id') + .joins('LEFT OUTER JOIN project_settings ON project_settings.project_id = projects.id') + .joins('LEFT OUTER JOIN project_authorizations ON project_authorizations.project_id = projects.id') + .where('project_statistics.repository_size' => 0, + 'project_settings.legacy_open_source_license_available' => true) + .group('projects.id') + .having('COUNT(project_authorizations.user_id) = 1') + + ProjectSetting + .where(project_id: one_member_no_repo_projects) + .update_all(legacy_open_source_license_available: false) + end + end + end + end +end diff --git a/lib/gitlab/background_migration/drop_invalid_security_findings.rb b/lib/gitlab/background_migration/drop_invalid_security_findings.rb index 87551bb1b1e..000628e109c 100644 --- 
a/lib/gitlab/background_migration/drop_invalid_security_findings.rb +++ b/lib/gitlab/background_migration/drop_invalid_security_findings.rb @@ -19,7 +19,7 @@ module Gitlab .no_uuid ranged_query.each_batch(of: sub_batch_size) do |sub_batch| - first, last = sub_batch.pluck(Arel.sql('min(id), max(id)')).first + first, last = sub_batch.pick(Arel.sql('min(id), max(id)')) # The query need to be reconstructed because .each_batch modifies the default scope # See: https://gitlab.com/gitlab-org/gitlab/-/issues/330510 diff --git a/lib/gitlab/background_migration/encrypt_static_object_token.rb b/lib/gitlab/background_migration/encrypt_static_object_token.rb index a087d2529eb..e1805d40bab 100644 --- a/lib/gitlab/background_migration/encrypt_static_object_token.rb +++ b/lib/gitlab/background_migration/encrypt_static_object_token.rb @@ -23,7 +23,7 @@ module Gitlab .without_static_object_token_encrypted ranged_query.each_batch(of: BATCH_SIZE) do |sub_batch| - first, last = sub_batch.pluck(Arel.sql('min(id), max(id)')).first + first, last = sub_batch.pick(Arel.sql('min(id), max(id)')) batch_query = User.unscoped .where(id: first..last) diff --git a/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at.rb b/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at.rb index 8f785476aa0..6de2187b8e3 100644 --- a/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at.rb +++ b/lib/gitlab/background_migration/fix_first_mentioned_in_commit_at.rb @@ -29,7 +29,7 @@ module Gitlab def perform(start_id, end_id) scope(start_id, end_id).each_batch(of: SUB_BATCH_SIZE, column: :issue_id) do |sub_batch| - first, last = sub_batch.pluck(Arel.sql('min(issue_id), max(issue_id)')).first + first, last = sub_batch.pick(Arel.sql('min(issue_id), max(issue_id)')) # The query need to be reconstructed because .each_batch modifies the default scope # See: https://gitlab.com/gitlab-org/gitlab/-/issues/330510 diff --git 
a/lib/gitlab/background_migration/fix_projects_without_prometheus_service.rb b/lib/gitlab/background_migration/fix_projects_without_prometheus_service.rb index 496ec0bd0a1..97a9913fa74 100644 --- a/lib/gitlab/background_migration/fix_projects_without_prometheus_service.rb +++ b/lib/gitlab/background_migration/fix_projects_without_prometheus_service.rb @@ -137,7 +137,7 @@ module Gitlab def create_sql(from_id, to_id) <<~SQL WITH created_records AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported} ( - INSERT INTO services (project_id, #{DEFAULTS.keys.map { |key| %("#{key}")}.join(',')}, created_at, updated_at) + INSERT INTO services (project_id, #{DEFAULTS.keys.map { |key| %("#{key}") }.join(',')}, created_at, updated_at) #{select_insert_values_sql(from_id, to_id)} RETURNING * ) diff --git a/lib/gitlab/background_migration/migrate_shared_vulnerability_scanners.rb b/lib/gitlab/background_migration/migrate_shared_vulnerability_scanners.rb new file mode 100644 index 00000000000..bea0120f093 --- /dev/null +++ b/lib/gitlab/background_migration/migrate_shared_vulnerability_scanners.rb @@ -0,0 +1,16 @@ +# frozen_string_literal: true + +module Gitlab + module BackgroundMigration + # rubocop: disable Style/Documentation + class MigrateSharedVulnerabilityScanners < BatchedMigrationJob + def perform + end + end + # rubocop: enable Style/Documentation + end +end + +# rubocop: disable Layout/LineLength +Gitlab::BackgroundMigration::MigrateSharedVulnerabilityScanners.prepend_mod_with("Gitlab::BackgroundMigration::MigrateSharedVulnerabilityScanners") +# rubocop: enable Layout/LineLength diff --git a/lib/gitlab/background_migration/populate_status_column_of_security_scans.rb b/lib/gitlab/background_migration/populate_status_column_of_security_scans.rb deleted file mode 100644 index 9740bcaa86b..00000000000 --- a/lib/gitlab/background_migration/populate_status_column_of_security_scans.rb +++ /dev/null @@ -1,13 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - 
module BackgroundMigration - class PopulateStatusColumnOfSecurityScans # rubocop:disable Style/Documentation - def perform(_start_id, _end_id) - # no-op - end - end - end -end - -Gitlab::BackgroundMigration::PopulateStatusColumnOfSecurityScans.prepend_mod diff --git a/lib/gitlab/background_migration/populate_vulnerability_reads.rb b/lib/gitlab/background_migration/populate_vulnerability_reads.rb index 5e6475a3d1a..656c62d9ee5 100644 --- a/lib/gitlab/background_migration/populate_vulnerability_reads.rb +++ b/lib/gitlab/background_migration/populate_vulnerability_reads.rb @@ -10,7 +10,7 @@ module Gitlab def perform(start_id, end_id, sub_batch_size) vulnerability_model.where(id: start_id..end_id).each_batch(of: sub_batch_size) do |sub_batch| - first, last = sub_batch.pluck(Arel.sql('min(id), max(id)')).first + first, last = sub_batch.pick(Arel.sql('min(id), max(id)')) connection.execute(insert_query(first, last)) sleep PAUSE_SECONDS diff --git a/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb b/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb index 2b27bad3497..845a3c16bbe 100644 --- a/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb +++ b/lib/gitlab/background_migration/project_namespaces/backfill_project_namespaces.rb @@ -105,9 +105,11 @@ module Gitlab .joins("INNER JOIN namespaces n2 ON namespaces.parent_id = n2.id") .select("namespaces.id as project_namespace_id, n2.traversal_ids") + # some customers have namespaces.id column type as bigint, which makes array_append(integer[], bigint) to fail + # so we just explicitly cast arguments to compatible types ApplicationRecord.connection.execute <<~SQL UPDATE namespaces - SET traversal_ids = array_append(project_namespaces.traversal_ids, project_namespaces.project_namespace_id) + SET traversal_ids = array_append(project_namespaces.traversal_ids::bigint[], project_namespaces.project_namespace_id::bigint) FROM 
(#{namespaces.to_sql}) as project_namespaces(project_namespace_id, traversal_ids) WHERE id = project_namespaces.project_namespace_id SQL diff --git a/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url.rb b/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url.rb index bba1ca26b35..e9a38916999 100644 --- a/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url.rb +++ b/lib/gitlab/background_migration/update_jira_tracker_data_deployment_type_based_on_url.rb @@ -1,42 +1,74 @@ # frozen_string_literal: true # rubocop: disable Style/Documentation -class Gitlab::BackgroundMigration::UpdateJiraTrackerDataDeploymentTypeBasedOnUrl - # rubocop: disable Gitlab/NamespacedClass - class JiraTrackerData < ActiveRecord::Base - self.table_name = "jira_tracker_data" - self.inheritance_column = :_type_disabled +module Gitlab + module BackgroundMigration + class UpdateJiraTrackerDataDeploymentTypeBasedOnUrl < Gitlab::BackgroundMigration::BatchedMigrationJob + # rubocop: disable Gitlab/NamespacedClass + class JiraTrackerData < ActiveRecord::Base + self.table_name = "jira_tracker_data" + self.inheritance_column = :_type_disabled - include ::Integrations::BaseDataFields - attr_encrypted :url, encryption_options - attr_encrypted :api_url, encryption_options + include ::Integrations::BaseDataFields + attr_encrypted :url, encryption_options + attr_encrypted :api_url, encryption_options - enum deployment_type: { unknown: 0, server: 1, cloud: 2 }, _prefix: :deployment - end - # rubocop: enable Gitlab/NamespacedClass + enum deployment_type: { unknown: 0, server: 1, cloud: 2 }, _prefix: :deployment + end + # rubocop: enable Gitlab/NamespacedClass - # https://rubular.com/r/uwgK7k9KH23efa - JIRA_CLOUD_REGEX = %r{^https?://[A-Za-z0-9](?:[A-Za-z0-9\-]{0,61}[A-Za-z0-9])?\.atlassian\.net$}ix.freeze + # https://rubular.com/r/uwgK7k9KH23efa + JIRA_CLOUD_REGEX = 
%r{^https?://[A-Za-z0-9](?:[A-Za-z0-9\-]{0,61}[A-Za-z0-9])?\.atlassian\.net$}ix.freeze - # rubocop: disable CodeReuse/ActiveRecord - def perform(start_id, end_id) - trackers_data = JiraTrackerData - .where(deployment_type: 'unknown') - .where(id: start_id..end_id) + def perform + cloud = [] + server = [] + unknown = [] - cloud, server = trackers_data.partition { |tracker_data| tracker_data.url.match?(JIRA_CLOUD_REGEX) } + trackers_data.each do |tracker_data| + client_url = tracker_data.api_url.presence || tracker_data.url - cloud_mappings = cloud.each_with_object({}) do |tracker_data, hash| - hash[tracker_data] = { deployment_type: 2 } - end + if client_url.blank? + unknown << tracker_data + elsif client_url.match?(JIRA_CLOUD_REGEX) + cloud << tracker_data + else + server << tracker_data + end + end - server_mapppings = server.each_with_object({}) do |tracker_data, hash| - hash[tracker_data] = { deployment_type: 1 } - end + cloud_mappings = cloud.each_with_object({}) do |tracker_data, hash| + hash[tracker_data] = { deployment_type: 2 } + end + + server_mappings = server.each_with_object({}) do |tracker_data, hash| + hash[tracker_data] = { deployment_type: 1 } + end + + unknown_mappings = unknown.each_with_object({}) do |tracker_data, hash| + hash[tracker_data] = { deployment_type: 0 } + end - mappings = cloud_mappings.merge(server_mapppings) + mappings = cloud_mappings.merge(server_mappings, unknown_mappings) - ::Gitlab::Database::BulkUpdate.execute(%i[deployment_type], mappings) + update_records(mappings) + end + + private + + def update_records(mappings) + return if mappings.empty? 
+ + ::Gitlab::Database::BulkUpdate.execute(%i[deployment_type], mappings) + end + + # rubocop: disable CodeReuse/ActiveRecord + def trackers_data + @trackers_data ||= JiraTrackerData + .where(deployment_type: 'unknown') + .where(batch_column => start_id..end_id) + end + # rubocop: enable CodeReuse/ActiveRecord + end end - # rubocop: enable CodeReuse/ActiveRecord end diff --git a/lib/gitlab/background_migration/update_timelogs_null_spent_at.rb b/lib/gitlab/background_migration/update_timelogs_null_spent_at.rb index 38932e52bb0..b61f2ee7f4c 100644 --- a/lib/gitlab/background_migration/update_timelogs_null_spent_at.rb +++ b/lib/gitlab/background_migration/update_timelogs_null_spent_at.rb @@ -12,7 +12,7 @@ module Gitlab define_batchable_model('timelogs', connection: connection) .where(spent_at: nil, id: start_id..stop_id) .each_batch(of: 100) do |subbatch| - batch_start, batch_end = subbatch.pluck('min(id), max(id)').first + batch_start, batch_end = subbatch.pick('min(id), max(id)') update_timelogs(batch_start, batch_end) end diff --git a/lib/gitlab/background_task.rb b/lib/gitlab/background_task.rb new file mode 100644 index 00000000000..1f03e32844c --- /dev/null +++ b/lib/gitlab/background_task.rb @@ -0,0 +1,95 @@ +# frozen_string_literal: true + +module Gitlab + # Used to run small workloads concurrently to other threads in the current process. + # This may be necessary when accessing process state, which cannot be done via + # Sidekiq jobs. + # + # Since the given task is put on its own thread, use instances sparingly and only + # for fast computations since they will compete with other threads such as Puma + # or Sidekiq workers for CPU time and memory. 
+ # + # Good examples: + # - Polling and updating process counters + # - Observing process or thread state + # - Enforcing process limits at the application level + # + # Bad examples: + # - Running database queries + # - Running CPU bound work loads + # + # As a guideline, aim to yield frequently if tasks execute logic in loops by + # making each iteration cheap. If life-cycle callbacks like start and stop + # aren't necessary and the task does not loop, consider just using Thread.new. + # + # rubocop: disable Gitlab/NamespacedClass + class BackgroundTask + AlreadyStartedError = Class.new(StandardError) + + attr_reader :name + + def running? + @state == :running + end + + # Possible options: + # - name [String] used to identify the task in thread listings and logs (defaults to 'background_task') + # - synchronous [Boolean] if true, turns `start` into a blocking call + def initialize(task, **options) + @task = task + @synchronous = options[:synchronous] + @name = options[:name] || self.class.name.demodulize.underscore + # We use a monitor, not a Mutex, because monitors allow for re-entrant locking. + @mutex = ::Monitor.new + @state = :idle + end + + def start + @mutex.synchronize do + raise AlreadyStartedError, "background task #{name} already running on #{@thread}" if running? + + start_task = @task.respond_to?(:start) ? @task.start : true + + if start_task + @state = :running + + at_exit { stop } + + @thread = Thread.new do + Thread.current.name = name + @task.call + end + + @thread.join if @synchronous + end + end + + self + end + + def stop + @mutex.synchronize do + break unless running? + + if @thread + # If thread is not in a stopped state, interrupt it because it may be sleeping. + # This is so we process a stop signal ASAP. + @thread.wakeup if @thread.alive? + begin + # Propagate stop event if supported. 
+ @task.stop if @task.respond_to?(:stop) + + # join will rethrow any error raised on the background thread + @thread.join unless Thread.current == @thread + rescue Exception => ex # rubocop:disable Lint/RescueException + Gitlab::ErrorTracking.track_exception(ex, extra: { reported_by: name }) + end + @thread = nil + end + + @state = :stopped + end + end + end + # rubocop: enable Gitlab/NamespacedClass +end diff --git a/lib/gitlab/batch_pop_queueing.rb b/lib/gitlab/batch_pop_queueing.rb index 62fc8cd048e..103ce644f2b 100644 --- a/lib/gitlab/batch_pop_queueing.rb +++ b/lib/gitlab/batch_pop_queueing.rb @@ -73,7 +73,7 @@ module Gitlab begin all_args = pop_all - yield all_args if block_given? + yield all_args if block { status: :finished, new_items: peek_all } ensure diff --git a/lib/gitlab/cache/request_cache.rb b/lib/gitlab/cache/request_cache.rb index 3ad919fbba8..13b4cace08a 100644 --- a/lib/gitlab/cache/request_cache.rb +++ b/lib/gitlab/cache/request_cache.rb @@ -15,7 +15,7 @@ module Gitlab attr_accessor :request_cache_key_block def request_cache_key(&block) - if block_given? 
+ if block self.request_cache_key_block = block else request_cache_key_block diff --git a/lib/gitlab/chat_name_token.rb b/lib/gitlab/chat_name_token.rb index 9b4cb9d0134..76f2a4ae38c 100644 --- a/lib/gitlab/chat_name_token.rb +++ b/lib/gitlab/chat_name_token.rb @@ -16,7 +16,9 @@ module Gitlab def get Gitlab::Redis::SharedState.with do |redis| data = redis.get(redis_shared_state_key) - Gitlab::Json.parse(data, symbolize_names: true) if data + params = Gitlab::Json.parse(data, symbolize_names: true) if data + params[:integration_id] ||= params.delete(:service_id) if params && params[:service_id] + params end end diff --git a/lib/gitlab/ci/artifacts/logger.rb b/lib/gitlab/ci/artifacts/logger.rb new file mode 100644 index 00000000000..628f4129df4 --- /dev/null +++ b/lib/gitlab/ci/artifacts/logger.rb @@ -0,0 +1,64 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + module Artifacts + module Logger + def log_artifacts_filesize(artifact_file) + return if artifact_file.nil? + + unless artifact_file.is_a?(::Ci::Artifactable) + raise ArgumentError, "unknown artifact file class `#{artifact_file.class}`" + end + + ::Gitlab::ApplicationContext.push(artifact: artifact_file) + end + + def log_artifacts_context(job) + ::Gitlab::ApplicationContext.push( + namespace: job&.project&.namespace, + project: job&.project, + job: job + ) + end + + def log_build_dependencies(size:, count: 0) + ::Gitlab::ApplicationContext.push( + artifacts_dependencies_size: size, + artifacts_dependencies_count: count + ) + end + + def self.log_created(artifact) + payload = Gitlab::ApplicationContext.current.merge( + message: 'Artifact created', + job_artifact_id: artifact.id, + size: artifact.size, + type: artifact.file_type, + build_id: artifact.job_id, + project_id: artifact.project_id + ) + + Gitlab::AppLogger.info(payload) + end + + def self.log_deleted(job_artifacts, method) + Array(job_artifacts).each do |artifact| + payload = Gitlab::ApplicationContext.current.merge( + message: 
'Artifact deleted', + job_artifact_id: artifact.id, + expire_at: artifact.expire_at, + size: artifact.size, + type: artifact.file_type, + build_id: artifact.job_id, + project_id: artifact.project_id, + method: method + ) + + Gitlab::AppLogger.info(payload) + end + end + end + end + end +end diff --git a/lib/gitlab/ci/artifacts/metrics.rb b/lib/gitlab/ci/artifacts/metrics.rb index 03459c4bf36..59930426cd5 100644 --- a/lib/gitlab/ci/artifacts/metrics.rb +++ b/lib/gitlab/ci/artifacts/metrics.rb @@ -6,6 +6,13 @@ module Gitlab class Metrics include Gitlab::Utils::StrongMemoize + def self.build_completed_report_type_counter(report_type) + name = "artifact_report_#{report_type}_builds_completed_total".to_sym + comment = "Number of completed builds with #{report_type} report artifacts" + + ::Gitlab::Metrics.counter(name, comment) + end + def increment_destroyed_artifacts_count(size) destroyed_artifacts_counter.increment({}, size.to_i) end diff --git a/lib/gitlab/ci/build/artifacts/adapters/zip_stream.rb b/lib/gitlab/ci/build/artifacts/adapters/zip_stream.rb new file mode 100644 index 00000000000..690a47097c6 --- /dev/null +++ b/lib/gitlab/ci/build/artifacts/adapters/zip_stream.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + module Build + module Artifacts + module Adapters + class ZipStream + MAX_DECOMPRESSED_SIZE = 100.megabytes + MAX_FILES_PROCESSED = 50 + + attr_reader :stream + + InvalidStreamError = Class.new(StandardError) + + def initialize(stream) + raise InvalidStreamError, "Stream is required" unless stream + + @stream = stream + @files_processed = 0 + end + + def each_blob + Zip::InputStream.open(stream) do |zio| + while entry = zio.get_next_entry + break if at_files_processed_limit? 
+ next unless should_process?(entry) + + @files_processed += 1 + + yield entry.get_input_stream.read + end + end + end + + private + + def should_process?(entry) + file?(entry) && !too_large?(entry) + end + + def file?(entry) + # Check the file name as a workaround for incorrect + # file type detection when using InputStream + # https://github.com/rubyzip/rubyzip/issues/533 + entry.file? && !entry.name.end_with?('/') + end + + def too_large?(entry) + entry.size > MAX_DECOMPRESSED_SIZE + end + + def at_files_processed_limit? + @files_processed >= MAX_FILES_PROCESSED + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/build/releaser.rb b/lib/gitlab/ci/build/releaser.rb index 9720bb1123a..09717516aa4 100644 --- a/lib/gitlab/ci/build/releaser.rb +++ b/lib/gitlab/ci/build/releaser.rb @@ -5,7 +5,7 @@ module Gitlab module Build class Releaser BASE_COMMAND = 'release-cli create' - SINGLE_FLAGS = %i[name description tag_name ref released_at].freeze + SINGLE_FLAGS = %i[name description tag_name tag_message ref released_at].freeze ARRAY_FLAGS = %i[milestones].freeze attr_reader :config diff --git a/lib/gitlab/ci/build/rules.rb b/lib/gitlab/ci/build/rules.rb index 2d4f9cf635b..dee95534b07 100644 --- a/lib/gitlab/ci/build/rules.rb +++ b/lib/gitlab/ci/build/rules.rb @@ -6,7 +6,7 @@ module Gitlab class Rules include ::Gitlab::Utils::StrongMemoize - Result = Struct.new(:when, :start_in, :allow_failure, :variables) do + Result = Struct.new(:when, :start_in, :allow_failure, :variables, :errors) do def build_attributes { when: self.when, @@ -38,6 +38,8 @@ module Gitlab else Result.new('never') end + rescue Rule::Clause::ParseError => e + Result.new('never', nil, nil, nil, [e.message]) end private diff --git a/lib/gitlab/ci/build/rules/rule/clause.rb b/lib/gitlab/ci/build/rules/rule/clause.rb index 6d4bbbb8c21..503f2a87361 100644 --- a/lib/gitlab/ci/build/rules/rule/clause.rb +++ b/lib/gitlab/ci/build/rules/rule/clause.rb @@ -11,6 +11,7 @@ module Gitlab # Used for 
job's inclusion rules configuration. # UnknownClauseError = Class.new(StandardError) + ParseError = Class.new(StandardError) def self.fabricate(type, value) "#{self}::#{type.to_s.camelize}".safe_constantize&.new(value) diff --git a/lib/gitlab/ci/build/rules/rule/clause/changes.rb b/lib/gitlab/ci/build/rules/rule/clause/changes.rb index 1bcd87c9d93..1034f5eacef 100644 --- a/lib/gitlab/ci/build/rules/rule/clause/changes.rb +++ b/lib/gitlab/ci/build/rules/rule/clause/changes.rb @@ -11,10 +11,12 @@ module Gitlab end def satisfied_by?(pipeline, context) - return true unless pipeline&.modified_paths + modified_paths = find_modified_paths(pipeline) + + return true unless modified_paths expanded_globs = expand_globs(context) - pipeline.modified_paths.any? do |path| + modified_paths.any? do |path| expanded_globs.any? do |glob| File.fnmatch?(glob, path, File::FNM_PATHNAME | File::FNM_DOTMATCH | File::FNM_EXTGLOB) end @@ -33,13 +35,31 @@ module Gitlab def paths strong_memoize(:paths) do - if @globs.is_a?(Array) - @globs - else - Array(@globs[:paths]) - end + Array(@globs[:paths]) end end + + def find_modified_paths(pipeline) + return unless pipeline + return pipeline.modified_paths unless ::Feature.enabled?(:ci_rules_changes_compare, pipeline.project) + + compare_to_sha = find_compare_to_sha(pipeline) + + if compare_to_sha + pipeline.modified_paths_since(compare_to_sha) + else + pipeline.modified_paths + end + end + + def find_compare_to_sha(pipeline) + return unless @globs.include?(:compare_to) + + commit = pipeline.project.commit(@globs[:compare_to]) + raise Rules::Rule::Clause::ParseError, 'rules:changes:compare_to is not a valid ref' unless commit + + commit.sha + end end end end diff --git a/lib/gitlab/ci/config.rb b/lib/gitlab/ci/config.rb index 15a4ff91c1b..438fa1cb3b2 100644 --- a/lib/gitlab/ci/config.rb +++ b/lib/gitlab/ci/config.rb @@ -81,6 +81,10 @@ module Gitlab root.jobs_value end + def workflow_rules + root.workflow_entry.rules_value + end + def normalized_jobs 
@normalized_jobs ||= Ci::Config::Normalizer.new(jobs).normalize_jobs end diff --git a/lib/gitlab/ci/config/entry/image.rb b/lib/gitlab/ci/config/entry/image.rb index 96ac959a3f4..613f7ff3370 100644 --- a/lib/gitlab/ci/config/entry/image.rb +++ b/lib/gitlab/ci/config/entry/image.rb @@ -8,37 +8,13 @@ module Gitlab # Entry that represents a Docker image. # class Image < ::Gitlab::Config::Entry::Node - include ::Gitlab::Config::Entry::Validatable - include ::Gitlab::Config::Entry::Attributable - include ::Gitlab::Config::Entry::Configurable - - ALLOWED_KEYS = %i[name entrypoint ports pull_policy].freeze - LEGACY_ALLOWED_KEYS = %i[name entrypoint ports].freeze + include ::Gitlab::Ci::Config::Entry::Imageable validations do - validates :config, hash_or_string: true - validates :config, allowed_keys: ALLOWED_KEYS, if: :ci_docker_image_pull_policy_enabled? - validates :config, allowed_keys: LEGACY_ALLOWED_KEYS, unless: :ci_docker_image_pull_policy_enabled? - validates :config, disallowed_keys: %i[ports], unless: :with_image_ports? - - validates :name, type: String, presence: true - validates :entrypoint, array_of_strings: true, allow_nil: true - end - - entry :ports, Entry::Ports, - description: 'Ports used to expose the image' - - entry :pull_policy, Entry::PullPolicy, - description: 'Pull policy for the image' - - attributes :ports, :pull_policy - - def name - value[:name] - end - - def entrypoint - value[:entrypoint] + validates :config, allowed_keys: IMAGEABLE_ALLOWED_KEYS, + if: :ci_docker_image_pull_policy_enabled? + validates :config, allowed_keys: IMAGEABLE_LEGACY_ALLOWED_KEYS, + unless: :ci_docker_image_pull_policy_enabled? end def value @@ -55,18 +31,6 @@ module Gitlab {} end end - - def with_image_ports? - opt(:with_image_ports) - end - - def ci_docker_image_pull_policy_enabled? - ::Feature.enabled?(:ci_docker_image_pull_policy) - end - - def skip_config_hash_validation? 
- true - end end end end diff --git a/lib/gitlab/ci/config/entry/imageable.rb b/lib/gitlab/ci/config/entry/imageable.rb new file mode 100644 index 00000000000..f045ee3d549 --- /dev/null +++ b/lib/gitlab/ci/config/entry/imageable.rb @@ -0,0 +1,61 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + class Config + module Entry + ## + # Represents Imageable concern shared by Image and Service. + module Imageable + extend ActiveSupport::Concern + + include ::Gitlab::Config::Entry::Attributable + include ::Gitlab::Config::Entry::Configurable + + IMAGEABLE_ALLOWED_KEYS = %i[name entrypoint ports pull_policy].freeze + IMAGEABLE_LEGACY_ALLOWED_KEYS = %i[name entrypoint ports].freeze + + included do + include ::Gitlab::Config::Entry::Validatable + + validations do + validates :config, hash_or_string: true + validates :config, disallowed_keys: %i[ports], unless: :with_image_ports? + + validates :name, type: String, presence: true + validates :entrypoint, array_of_strings: true, allow_nil: true + end + + attributes :ports, :pull_policy + + entry :ports, Entry::Ports, + description: 'Ports used to expose the image/service' + + entry :pull_policy, Entry::PullPolicy, + description: 'Pull policy for the image/service' + end + + def name + value[:name] + end + + def entrypoint + value[:entrypoint] + end + + def with_image_ports? + opt(:with_image_ports) + end + + def ci_docker_image_pull_policy_enabled? + ::Feature.enabled?(:ci_docker_image_pull_policy) + end + + def skip_config_hash_validation? 
+ true + end + end + end + end + end +end diff --git a/lib/gitlab/ci/config/entry/processable.rb b/lib/gitlab/ci/config/entry/processable.rb index 78794f524f4..975da8662e1 100644 --- a/lib/gitlab/ci/config/entry/processable.rb +++ b/lib/gitlab/ci/config/entry/processable.rb @@ -120,7 +120,7 @@ module Gitlab stage: stage_value, extends: extends, rules: rules_value, - job_variables: variables_value.to_h, + job_variables: variables_entry.value_with_data, root_variables_inheritance: root_variables_inheritance, only: only_value, except: except_value, diff --git a/lib/gitlab/ci/config/entry/release.rb b/lib/gitlab/ci/config/entry/release.rb index 7e504c24ade..2be0eae120b 100644 --- a/lib/gitlab/ci/config/entry/release.rb +++ b/lib/gitlab/ci/config/entry/release.rb @@ -12,8 +12,8 @@ module Gitlab include ::Gitlab::Config::Entry::Validatable include ::Gitlab::Config::Entry::Attributable - ALLOWED_KEYS = %i[tag_name name description ref released_at milestones assets].freeze - attributes %i[tag_name name ref milestones assets].freeze + ALLOWED_KEYS = %i[tag_name tag_message name description ref released_at milestones assets].freeze + attributes %i[tag_name tag_message name ref milestones assets].freeze attr_reader :released_at # Attributable description conflicts with @@ -31,6 +31,7 @@ module Gitlab validations do validates :config, allowed_keys: ALLOWED_KEYS validates :tag_name, type: String, presence: true + validates :tag_message, type: String, allow_blank: true validates :description, type: String, presence: true validates :milestones, array_of_strings_or_string: true, allow_blank: true validate do diff --git a/lib/gitlab/ci/config/entry/reports.rb b/lib/gitlab/ci/config/entry/reports.rb index d5d204bb995..f77876cc926 100644 --- a/lib/gitlab/ci/config/entry/reports.rb +++ b/lib/gitlab/ci/config/entry/reports.rb @@ -17,7 +17,7 @@ module Gitlab dast performance browser_performance load_performance license_scanning metrics lsif dotenv terraform accessibility requirements 
coverage_fuzzing api_fuzzing cluster_image_scanning - coverage_report].freeze + coverage_report cyclonedx].freeze attributes ALLOWED_KEYS @@ -48,6 +48,7 @@ module Gitlab validates :terraform, array_of_strings_or_string: true validates :accessibility, array_of_strings_or_string: true validates :requirements, array_of_strings_or_string: true + validates :cyclonedx, array_of_strings_or_string: true end end diff --git a/lib/gitlab/ci/config/entry/rules/rule/changes.rb b/lib/gitlab/ci/config/entry/rules/rule/changes.rb index a56b928450a..107e7c228af 100644 --- a/lib/gitlab/ci/config/entry/rules/rule/changes.rb +++ b/lib/gitlab/ci/config/entry/rules/rule/changes.rb @@ -30,7 +30,7 @@ module Gitlab include ::Gitlab::Config::Entry::Validatable include ::Gitlab::Config::Entry::Attributable - ALLOWED_KEYS = %i[paths].freeze + ALLOWED_KEYS = %i[paths compare_to].freeze REQUIRED_KEYS = %i[paths].freeze attributes ALLOWED_KEYS @@ -43,6 +43,7 @@ module Gitlab validates :paths, array_of_strings: true, length: { maximum: 50, too_long: "has too many entries (maximum %{count})" } + validates :compare_to, type: String, allow_nil: true end end end diff --git a/lib/gitlab/ci/config/entry/service.rb b/lib/gitlab/ci/config/entry/service.rb index 1a35f7de6cf..0e19447dff8 100644 --- a/lib/gitlab/ci/config/entry/service.rb +++ b/lib/gitlab/ci/config/entry/service.rb @@ -7,41 +7,28 @@ module Gitlab ## # Entry that represents a configuration of Docker service. # - # TODO: remove duplication with Image superclass by defining a common - # Imageable concern. 
- # https://gitlab.com/gitlab-org/gitlab/issues/208774 class Service < ::Gitlab::Config::Entry::Node - include ::Gitlab::Config::Entry::Validatable - include ::Gitlab::Config::Entry::Attributable - include ::Gitlab::Config::Entry::Configurable + include ::Gitlab::Ci::Config::Entry::Imageable - ALLOWED_KEYS = %i[name entrypoint command alias ports variables pull_policy].freeze - LEGACY_ALLOWED_KEYS = %i[name entrypoint command alias ports variables].freeze + ALLOWED_KEYS = %i[command alias variables].freeze + LEGACY_ALLOWED_KEYS = %i[command alias variables].freeze validations do - validates :config, hash_or_string: true - validates :config, allowed_keys: ALLOWED_KEYS, if: :ci_docker_image_pull_policy_enabled? - validates :config, allowed_keys: LEGACY_ALLOWED_KEYS, unless: :ci_docker_image_pull_policy_enabled? - validates :config, disallowed_keys: %i[ports], unless: :with_image_ports? - validates :name, type: String, presence: true - validates :entrypoint, array_of_strings: true, allow_nil: true + validates :config, allowed_keys: ALLOWED_KEYS + IMAGEABLE_ALLOWED_KEYS, + if: :ci_docker_image_pull_policy_enabled? + validates :config, allowed_keys: LEGACY_ALLOWED_KEYS + IMAGEABLE_LEGACY_ALLOWED_KEYS, + unless: :ci_docker_image_pull_policy_enabled? validates :command, array_of_strings: true, allow_nil: true validates :alias, type: String, allow_nil: true validates :alias, type: String, presence: true, unless: ->(record) { record.ports.blank? 
} end - entry :ports, Entry::Ports, - description: 'Ports used to expose the service' - - entry :pull_policy, Entry::PullPolicy, - description: 'Pull policy for the service' - entry :variables, ::Gitlab::Ci::Config::Entry::Variables, description: 'Environment variables available for this service.', inherit: false - attributes :ports, :pull_policy, :variables + attributes :variables def alias value[:alias] @@ -51,14 +38,6 @@ module Gitlab value[:command] end - def name - value[:name] - end - - def entrypoint - value[:entrypoint] - end - def value if string? { name: @config } @@ -70,18 +49,6 @@ module Gitlab {} end end - - def with_image_ports? - opt(:with_image_ports) - end - - def ci_docker_image_pull_policy_enabled? - ::Feature.enabled?(:ci_docker_image_pull_policy) - end - - def skip_config_hash_validation? - true - end end end end diff --git a/lib/gitlab/ci/config/entry/variables.rb b/lib/gitlab/ci/config/entry/variables.rb index efb469ee32a..3130aec0446 100644 --- a/lib/gitlab/ci/config/entry/variables.rb +++ b/lib/gitlab/ci/config/entry/variables.rb @@ -18,7 +18,9 @@ module Gitlab end def value - @config.to_h { |key, value| [key.to_s, expand_value(value)[:value]] } + @config.to_h do |key, data| + [key.to_s, expand_data(data)[:value]] + end end def self.default(**) @@ -26,7 +28,9 @@ module Gitlab end def value_with_data - @config.to_h { |key, value| [key.to_s, expand_value(value)] } + @config.to_h do |key, data| + [key.to_s, expand_data(data)] + end end def use_value_data? 
@@ -35,11 +39,11 @@ module Gitlab private - def expand_value(value) - if value.is_a?(Hash) - { value: value[:value].to_s, description: value[:description] } + def expand_data(data) + if data.is_a?(Hash) + { value: data[:value].to_s, description: data[:description] }.compact else - { value: value.to_s, description: nil } + { value: data.to_s } end end end diff --git a/lib/gitlab/ci/jwt.rb b/lib/gitlab/ci/jwt.rb index c294291e538..d3e7210b820 100644 --- a/lib/gitlab/ci/jwt.rb +++ b/lib/gitlab/ci/jwt.rb @@ -65,7 +65,7 @@ module Gitlab fields.merge!( environment: environment.name, environment_protected: environment_protected?.to_s, - deployment_tier: build.environment_deployment_tier || environment.tier + deployment_tier: build.environment_tier ) end diff --git a/lib/gitlab/ci/parsers.rb b/lib/gitlab/ci/parsers.rb index 1223d664214..b52e2d8f613 100644 --- a/lib/gitlab/ci/parsers.rb +++ b/lib/gitlab/ci/parsers.rb @@ -13,7 +13,8 @@ module Gitlab accessibility: ::Gitlab::Ci::Parsers::Accessibility::Pa11y, codequality: ::Gitlab::Ci::Parsers::Codequality::CodeClimate, sast: ::Gitlab::Ci::Parsers::Security::Sast, - secret_detection: ::Gitlab::Ci::Parsers::Security::SecretDetection + secret_detection: ::Gitlab::Ci::Parsers::Security::SecretDetection, + cyclonedx: ::Gitlab::Ci::Parsers::Sbom::Cyclonedx } end diff --git a/lib/gitlab/ci/parsers/sbom/cyclonedx.rb b/lib/gitlab/ci/parsers/sbom/cyclonedx.rb new file mode 100644 index 00000000000..deb20a2138c --- /dev/null +++ b/lib/gitlab/ci/parsers/sbom/cyclonedx.rb @@ -0,0 +1,79 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + module Parsers + module Sbom + class Cyclonedx + SUPPORTED_SPEC_VERSIONS = %w[1.4].freeze + COMPONENT_ATTRIBUTES = %w[type name version].freeze + + def parse!(blob, sbom_report) + @report = sbom_report + @data = Gitlab::Json.parse(blob) + + return unless valid? 
+ + parse_report + rescue JSON::ParserError => e + report.add_error("Report JSON is invalid: #{e}") + end + + private + + attr_reader :json_data, :report, :data + + def schema_validator + @schema_validator ||= Validators::CyclonedxSchemaValidator.new(data) + end + + def valid? + valid_schema? && supported_spec_version? + end + + def supported_spec_version? + return true if SUPPORTED_SPEC_VERSIONS.include?(data['specVersion']) + + report.add_error( + "Unsupported CycloneDX spec version. Must be one of: %{versions}" \ + % { versions: SUPPORTED_SPEC_VERSIONS.join(', ') } + ) + + false + end + + def valid_schema? + return true if schema_validator.valid? + + schema_validator.errors.each { |error| report.add_error(error) } + + false + end + + def parse_report + parse_metadata_properties + parse_components + end + + def parse_metadata_properties + properties = data.dig('metadata', 'properties') + source = CyclonedxProperties.parse_source(properties) + report.set_source(source) if source + end + + def parse_components + data['components']&.each do |component| + next unless supported_component_type?(component['type']) + + report.add_component(component.slice(*COMPONENT_ATTRIBUTES)) + end + end + + def supported_component_type?(type) + ::Enums::Sbom.component_types.include?(type.to_sym) + end + end + end + end + end +end diff --git a/lib/gitlab/ci/parsers/sbom/cyclonedx_properties.rb b/lib/gitlab/ci/parsers/sbom/cyclonedx_properties.rb new file mode 100644 index 00000000000..3dc73544208 --- /dev/null +++ b/lib/gitlab/ci/parsers/sbom/cyclonedx_properties.rb @@ -0,0 +1,112 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + module Parsers + module Sbom + # Parses GitLab CycloneDX metadata properties which are defined by the taxonomy at + # https://gitlab.com/gitlab-org/security-products/gitlab-cyclonedx-property-taxonomy + # + # This parser knows how to process schema version 1 and will not attempt to parse + # later versions. 
Each source type has it's own namespace in the property schema, + # and is also given its own parser. Properties are filtered by namespace, + # and then passed to each source parser for processing. + class CyclonedxProperties + SUPPORTED_SCHEMA_VERSION = '1' + GITLAB_PREFIX = 'gitlab:' + SOURCE_PARSERS = { + 'dependency_scanning' => ::Gitlab::Ci::Parsers::Sbom::Source::DependencyScanning + }.freeze + SUPPORTED_PROPERTIES = %w[ + meta:schema_version + dependency_scanning:category + dependency_scanning:input_file:path + dependency_scanning:source_file:path + dependency_scanning:package_manager:name + dependency_scanning:language:name + ].freeze + + def self.parse_source(...) + new(...).parse_source + end + + def initialize(properties) + @properties = properties + end + + def parse_source + return unless properties.present? + return unless supported_schema_version? + + source + end + + private + + attr_reader :properties + + def property_data + @property_data ||= properties + .each_with_object({}) { |property, data| parse_property(property, data) } + end + + def parse_property(property, data) + name = property['name'] + value = property['value'] + + # The specification permits the name or value to be absent. + return unless name.present? && value.present? + return unless name.start_with?(GITLAB_PREFIX) + + namespaced_name = name.delete_prefix(GITLAB_PREFIX) + + return unless SUPPORTED_PROPERTIES.include?(namespaced_name) + + parse_name_value_pair(namespaced_name, value, data) + end + + def parse_name_value_pair(name, value, data) + # Each namespace in the property name reflects a key in the hash. + # A property with the name `dependency_scanning:input_file:path` + # and the value `package-lock.json` should be transformed into + # this data: + # {"dependency_scanning": {"input_file": {"path": "package-lock.json"}}} + keys = name.split(':') + + # Remove last item from the keys and use it to create + # the initial object. + last = keys.pop + + # Work backwards. 
For each key, create a new hash wrapping the previous one. + # Using `dependency_scanning:input_file:path` as an example: + # + # 1. memo = { "path" => "package-lock.json" } (arguments given to reduce) + # 2. memo = { "input_file" => memo } + # 3. memo = { "dependency_scanning" => memo } + property = keys.reverse.reduce({ last => value }) do |memo, key| + { key => memo } + end + + data.deep_merge!(property) + end + + def schema_version + @schema_version ||= property_data.dig('meta', 'schema_version') + end + + def supported_schema_version? + schema_version == SUPPORTED_SCHEMA_VERSION + end + + def source + @source ||= property_data + .slice(*SOURCE_PARSERS.keys) + .lazy + .filter_map { |namespace, data| SOURCE_PARSERS[namespace].source(data) } + .first + end + end + end + end + end +end diff --git a/lib/gitlab/ci/parsers/sbom/source/dependency_scanning.rb b/lib/gitlab/ci/parsers/sbom/source/dependency_scanning.rb new file mode 100644 index 00000000000..ad04b3257f9 --- /dev/null +++ b/lib/gitlab/ci/parsers/sbom/source/dependency_scanning.rb @@ -0,0 +1,49 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + module Parsers + module Sbom + module Source + class DependencyScanning + REQUIRED_ATTRIBUTES = [ + %w[input_file path] + ].freeze + + def self.source(...) + new(...).source + end + + def initialize(data) + @data = data + end + + def source + return unless required_attributes_present? + + { + 'type' => :dependency_scanning, + 'data' => data, + 'fingerprint' => fingerprint + } + end + + private + + attr_reader :data + + def required_attributes_present? + REQUIRED_ATTRIBUTES.all? do |keys| + data.dig(*keys).present? 
+ end + end + + def fingerprint + Digest::SHA256.hexdigest(data.to_json) + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator.rb b/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator.rb new file mode 100644 index 00000000000..9d56e001c2f --- /dev/null +++ b/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator.rb @@ -0,0 +1,37 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + module Parsers + module Sbom + module Validators + class CyclonedxSchemaValidator + SCHEMA_PATH = Rails.root.join('app', 'validators', 'json_schemas', 'cyclonedx_report.json').freeze + + def initialize(report_data) + @report_data = report_data + end + + def valid? + errors.empty? + end + + def errors + @errors ||= pretty_errors + end + + private + + def raw_errors + JSONSchemer.schema(SCHEMA_PATH).validate(@report_data) + end + + def pretty_errors + raw_errors.map { |error| JSONSchemer::Errors.pretty(error) } + end + end + end + end + end + end +end diff --git a/lib/gitlab/ci/parsers/security/validators/schema_validator.rb b/lib/gitlab/ci/parsers/security/validators/schema_validator.rb index ee7733a081d..c075ada725a 100644 --- a/lib/gitlab/ci/parsers/security/validators/schema_validator.rb +++ b/lib/gitlab/ci/parsers/security/validators/schema_validator.rb @@ -38,13 +38,14 @@ module Gitlab def initialize(report_type, report_version) @report_type = report_type.to_sym @report_version = report_version.to_s + @supported_versions = SUPPORTED_VERSIONS[@report_type] end delegate :validate, to: :schemer private - attr_reader :report_type, :report_version + attr_reader :report_type, :report_version, :supported_versions def schemer JSONSchemer.schema(pathname) @@ -60,10 +61,24 @@ module Gitlab report_declared_version = File.join(root_path, report_version, file_name) return report_declared_version if File.file?(report_declared_version) + if latest_vendored_patch_version + 
latest_vendored_patch_version_file = File.join(root_path, latest_vendored_patch_version, file_name) + return latest_vendored_patch_version_file if File.file?(latest_vendored_patch_version) + end + earliest_supported_version = SUPPORTED_VERSIONS[report_type].min File.join(root_path, earliest_supported_version, file_name) end + def latest_vendored_patch_version + ::Security::ReportSchemaVersionMatcher.new( + report_declared_version: report_version, + supported_versions: supported_versions + ).call + rescue ArgumentError + nil + end + def file_name report_type == :api_fuzzing ? "dast-report-format.json" : "#{report_type.to_s.dasherize}-report-format.json" end @@ -79,29 +94,85 @@ module Gitlab @warnings = [] @deprecation_warnings = [] - populate_errors - populate_warnings + populate_schema_version_errors + populate_validation_errors populate_deprecation_warnings end - def valid? - errors.empty? + def populate_schema_version_errors + add_schema_version_errors if add_schema_version_error? end - def populate_errors - schema_validation_errors = schema.validate(report_data).map { |error| JSONSchemer::Errors.pretty(error) } + def add_schema_version_errors + if report_version.nil? 
+ template = _("Report version not provided,"\ + " %{report_type} report type supports versions: %{supported_schema_versions}."\ + " GitLab will attempt to validate this report against the earliest supported versions of this report"\ + " type, to show all the errors but will not ingest the report") + message = format(template, report_type: report_type, supported_schema_versions: supported_schema_versions) + else + template = _("Version %{report_version} for report type %{report_type} is unsupported, supported versions"\ + " for this report type are: %{supported_schema_versions}."\ + " GitLab will attempt to validate this report against the earliest supported versions of this report"\ + " type, to show all the errors but will not ingest the report") + message = format(template, report_version: report_version, report_type: report_type, supported_schema_versions: supported_schema_versions) + end - log_warnings(problem_type: 'schema_validation_fails') unless schema_validation_errors.empty? + log_warnings(problem_type: 'using_unsupported_schema_version') + add_message_as(level: :error, message: message) + end + + def add_schema_version_error? + !report_uses_supported_schema_version? && + !report_uses_deprecated_schema_version? && + !report_uses_supported_major_and_minor_schema_version? + end + + def report_uses_deprecated_schema_version? + DEPRECATED_VERSIONS[report_type].include?(report_version) + end + + def report_uses_supported_schema_version? + SUPPORTED_VERSIONS[report_type].include?(report_version) + end - if Feature.enabled?(:enforce_security_report_validation, @project) - @errors += schema_validation_errors + def report_uses_supported_major_and_minor_schema_version? + if !find_latest_patch_version.nil? + add_supported_major_minor_behavior_warning + true else - @warnings += schema_validation_errors + false end end - def populate_warnings - add_unsupported_report_version_message if !report_uses_supported_schema_version? && !report_uses_deprecated_schema_version? 
+ def find_latest_patch_version + ::Security::ReportSchemaVersionMatcher.new( + report_declared_version: report_version, + supported_versions: SUPPORTED_VERSIONS[report_type] + ).call + rescue ArgumentError + nil + end + + def add_supported_major_minor_behavior_warning + template = _("This report uses a supported MAJOR.MINOR schema version but the PATCH version doesn't match"\ + " any vendored schema version. Validation will be attempted against version"\ + " %{find_latest_patch_version}") + + message = format(template, find_latest_patch_version: find_latest_patch_version) + + add_message_as( + level: :warning, + message: message + ) + end + + def populate_validation_errors + schema_validation_errors = schema.validate(report_data).map { |error| JSONSchemer::Errors.pretty(error) } + + log_warnings(problem_type: 'schema_validation_fails') unless schema_validation_errors.empty? + + @errors += schema_validation_errors end def populate_deprecation_warnings @@ -111,10 +182,19 @@ module Gitlab def add_deprecated_report_version_message log_warnings(problem_type: 'using_deprecated_schema_version') - message = "Version #{report_version} for report type #{report_type} has been deprecated, supported versions for this report type are: #{supported_schema_versions}" + template = _("Version %{report_version} for report type %{report_type} has been deprecated,"\ + " supported versions for this report type are: %{supported_schema_versions}."\ + " GitLab will attempt to parse and ingest this report if valid.") + + message = format(template, report_version: report_version, report_type: report_type, supported_schema_versions: supported_schema_versions) + add_message_as(level: :deprecation_warning, message: message) end + def valid? + errors.empty? 
+ end + def log_warnings(problem_type:) Gitlab::AppLogger.info( message: 'security report schema validation problem', @@ -127,34 +207,6 @@ module Gitlab ) end - def add_unsupported_report_version_message - log_warnings(problem_type: 'using_unsupported_schema_version') - - if Feature.enabled?(:enforce_security_report_validation, @project) - handle_unsupported_report_version(treat_as: :error) - else - handle_unsupported_report_version(treat_as: :warning) - end - end - - def report_uses_deprecated_schema_version? - DEPRECATED_VERSIONS[report_type].include?(report_version) - end - - def report_uses_supported_schema_version? - SUPPORTED_VERSIONS[report_type].include?(report_version) - end - - def handle_unsupported_report_version(treat_as:) - if report_version.nil? - message = "Report version not provided, #{report_type} report type supports versions: #{supported_schema_versions}" - else - message = "Version #{report_version} for report type #{report_type} is unsupported, supported versions for this report type are: #{supported_schema_versions}" - end - - add_message_as(level: treat_as, message: message) - end - def supported_schema_versions SUPPORTED_VERSIONS[report_type].join(", ") end diff --git a/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines.rb b/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines.rb index f637001f9f8..9c12d46cede 100644 --- a/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines.rb +++ b/lib/gitlab/ci/pipeline/chain/cancel_pending_pipelines.rb @@ -40,7 +40,13 @@ module Gitlab ::Ci::Pipeline .id_in(pipeline_ids) .with_only_interruptible_builds - .each { |cancelable| cancelable.auto_cancel_running(pipeline) } + .each do |cancelable_pipeline| + # cascade_to_children not needed because we iterate through descendants here + cancelable_pipeline.cancel_running( + auto_canceled_by_pipeline_id: pipeline.id, + cascade_to_children: false + ) + end end end end diff --git a/lib/gitlab/ci/pipeline/chain/validate/external.rb 
b/lib/gitlab/ci/pipeline/chain/validate/external.rb index 8177502be1d..6e95c7988fc 100644 --- a/lib/gitlab/ci/pipeline/chain/validate/external.rb +++ b/lib/gitlab/ci/pipeline/chain/validate/external.rb @@ -101,7 +101,8 @@ module Gitlab ref: pipeline.ref, type: pipeline.source }, - builds: builds_validation_payload + builds: builds_validation_payload, + total_builds_count: current_user.pipelines.jobs_count_in_alive_pipelines } end diff --git a/lib/gitlab/ci/pipeline/expression/lexeme/matches.rb b/lib/gitlab/ci/pipeline/expression/lexeme/matches.rb index 6efb3a4f16a..c4f06c4686d 100644 --- a/lib/gitlab/ci/pipeline/expression/lexeme/matches.rb +++ b/lib/gitlab/ci/pipeline/expression/lexeme/matches.rb @@ -14,11 +14,9 @@ module Gitlab return false unless regexp - if ::Feature.enabled?(:ci_fix_rules_if_comparison_with_regexp_variable) - # All variables are evaluated as strings, even if they are regexp strings. - # So, we need to convert them to regexp objects. - regexp = Lexeme::Pattern.build_and_evaluate(regexp, variables) - end + # All variables are evaluated as strings, even if they are regexp strings. + # So, we need to convert them to regexp objects. + regexp = Lexeme::Pattern.build_and_evaluate(regexp, variables) regexp.scan(text.to_s).present? end diff --git a/lib/gitlab/ci/pipeline/expression/lexeme/not_matches.rb b/lib/gitlab/ci/pipeline/expression/lexeme/not_matches.rb index a72e5dbc822..99d9206da74 100644 --- a/lib/gitlab/ci/pipeline/expression/lexeme/not_matches.rb +++ b/lib/gitlab/ci/pipeline/expression/lexeme/not_matches.rb @@ -14,11 +14,9 @@ module Gitlab return true unless regexp - if ::Feature.enabled?(:ci_fix_rules_if_comparison_with_regexp_variable) - # All variables are evaluated as strings, even if they are regexp strings. - # So, we need to convert them to regexp objects. - regexp = Lexeme::Pattern.build_and_evaluate(regexp, variables) - end + # All variables are evaluated as strings, even if they are regexp strings. 
+ # So, we need to convert them to regexp objects. + regexp = Lexeme::Pattern.build_and_evaluate(regexp, variables) regexp.scan(text.to_s).empty? end diff --git a/lib/gitlab/ci/pipeline/seed/build.rb b/lib/gitlab/ci/pipeline/seed/build.rb index 901208f325a..93106b96af2 100644 --- a/lib/gitlab/ci/pipeline/seed/build.rb +++ b/lib/gitlab/ci/pipeline/seed/build.rb @@ -54,9 +54,11 @@ module Gitlab end def errors - return unless included? - strong_memoize(:errors) do + # We check rules errors before checking "included?" because rules affects its inclusion status. + next rules_errors if rules_errors + next unless included? + [needs_errors, variable_expansion_errors].compact.flatten end end @@ -168,6 +170,12 @@ module Gitlab end end + def rules_errors + strong_memoize(:rules_errors) do + ["Failed to parse rule for #{name}: #{rules_result.errors.join(', ')}"] if rules_result.errors.present? + end + end + def evaluate_context strong_memoize(:evaluate_context) do Gitlab::Ci::Build::Context::Build.new(@pipeline, @seed_attributes) diff --git a/lib/gitlab/ci/pipeline/seed/environment.rb b/lib/gitlab/ci/pipeline/seed/environment.rb index c8795840e5f..6bcc71a808b 100644 --- a/lib/gitlab/ci/pipeline/seed/environment.rb +++ b/lib/gitlab/ci/pipeline/seed/environment.rb @@ -30,7 +30,7 @@ module Gitlab end def deployment_tier - job.environment_deployment_tier + job.environment_tier_from_options end def expanded_environment_name diff --git a/lib/gitlab/ci/pipeline/seed/stage.rb b/lib/gitlab/ci/pipeline/seed/stage.rb index bc56fe9bef9..7cf6466cf4b 100644 --- a/lib/gitlab/ci/pipeline/seed/stage.rb +++ b/lib/gitlab/ci/pipeline/seed/stage.rb @@ -36,7 +36,7 @@ module Gitlab def errors strong_memoize(:errors) do - seeds.flat_map(&:errors).compact + @builds.flat_map(&:errors).compact end end diff --git a/lib/gitlab/ci/reports/coverage_report_generator.rb b/lib/gitlab/ci/reports/coverage_report_generator.rb index 76992a48b0a..6d57e05aa63 100644 --- 
a/lib/gitlab/ci/reports/coverage_report_generator.rb +++ b/lib/gitlab/ci/reports/coverage_report_generator.rb @@ -20,7 +20,7 @@ module Gitlab coverage_report.tap do |coverage_report| report_builds.find_each do |build| - build.each_report(::Ci::JobArtifact::COVERAGE_REPORT_FILE_TYPES) do |file_type, blob| + build.each_report(::Ci::JobArtifact.file_types_for_report(:coverage)) do |file_type, blob| Gitlab::Ci::Parsers.fabricate!(file_type).parse!( blob, coverage_report, diff --git a/lib/gitlab/ci/reports/sbom/component.rb b/lib/gitlab/ci/reports/sbom/component.rb new file mode 100644 index 00000000000..86b9be274cc --- /dev/null +++ b/lib/gitlab/ci/reports/sbom/component.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + module Reports + module Sbom + class Component + attr_reader :component_type, :name, :version + + def initialize(component = {}) + @component_type = component['type'] + @name = component['name'] + @version = component['version'] + end + end + end + end + end +end diff --git a/lib/gitlab/ci/reports/sbom/report.rb b/lib/gitlab/ci/reports/sbom/report.rb new file mode 100644 index 00000000000..dc6b3153e51 --- /dev/null +++ b/lib/gitlab/ci/reports/sbom/report.rb @@ -0,0 +1,34 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + module Reports + module Sbom + class Report + attr_reader :components, :source, :errors + + def initialize + @components = [] + @errors = [] + end + + def add_error(error) + errors << error + end + + def set_source(source) + self.source = Source.new(source) + end + + def add_component(component) + components << Component.new(component) + end + + private + + attr_writer :source + end + end + end + end +end diff --git a/lib/gitlab/ci/reports/sbom/reports.rb b/lib/gitlab/ci/reports/sbom/reports.rb new file mode 100644 index 00000000000..efb772cb818 --- /dev/null +++ b/lib/gitlab/ci/reports/sbom/reports.rb @@ -0,0 +1,21 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + module 
Reports + module Sbom + class Reports + attr_reader :reports + + def initialize + @reports = [] + end + + def add_report(report) + @reports << report + end + end + end + end + end +end diff --git a/lib/gitlab/ci/reports/sbom/source.rb b/lib/gitlab/ci/reports/sbom/source.rb new file mode 100644 index 00000000000..60bf30b65a5 --- /dev/null +++ b/lib/gitlab/ci/reports/sbom/source.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module Gitlab + module Ci + module Reports + module Sbom + class Source + attr_reader :source_type, :data, :fingerprint + + def initialize(source = {}) + @source_type = source['type'] + @data = source['data'] + @fingerprint = source['fingerprint'] + end + end + end + end + end +end diff --git a/lib/gitlab/ci/runner_releases.rb b/lib/gitlab/ci/runner_releases.rb index 8773ecbf09e..dab24bfd501 100644 --- a/lib/gitlab/ci/runner_releases.rb +++ b/lib/gitlab/ci/runner_releases.rb @@ -36,6 +36,9 @@ module Gitlab reset_backoff! extract_releases(response) + rescue Errno::ETIMEDOUT + @backoff_expire_time = next_backoff.from_now + break nil end end @@ -74,7 +77,7 @@ module Gitlab releases = response.parsed_response .map { |release| parse_runner_release(release) } .select(&:valid?) - .sort! + .sort return if releases.empty? && response.parsed_response.present? 
diff --git a/lib/gitlab/ci/runner_upgrade_check.rb b/lib/gitlab/ci/runner_upgrade_check.rb index 10a89bb15d4..03130addd6a 100644 --- a/lib/gitlab/ci/runner_upgrade_check.rb +++ b/lib/gitlab/ci/runner_upgrade_check.rb @@ -3,57 +3,70 @@ module Gitlab module Ci class RunnerUpgradeCheck - include Singleton + def initialize(gitlab_version, runner_releases_store = nil) + @gitlab_version = ::Gitlab::VersionInfo.parse(gitlab_version, parse_suffix: true) + @releases_store = runner_releases_store + end + + def check_runner_upgrade_suggestion(runner_version) + check_runner_upgrade_suggestions(runner_version).first + end - def check_runner_upgrade_status(runner_version) + private + + def runner_releases_store + @releases_store ||= RunnerReleases.instance + end + + def add_suggestion(suggestions, runner_version, version, status) + return false unless version && version > runner_version + + suggestions[version] = status + true + end + + def check_runner_upgrade_suggestions(runner_version) runner_version = ::Gitlab::VersionInfo.parse(runner_version, parse_suffix: true) - return { invalid_version: runner_version } unless runner_version.valid? - return { error: runner_version } unless runner_releases_store.releases + return { runner_version => :invalid_version } unless runner_version.valid? 
+ return { runner_version => :error } unless runner_releases_store.releases - # Recommend update if outside of backport window - recommended_version = recommendation_if_outside_backport_window(runner_version) - return { recommended: recommended_version } if recommended_version + suggestions = {} - # Recommend patch update if there's a newer release in a same minor branch as runner - recommended_version = recommended_runner_release_update(runner_version) - return { recommended: recommended_version } if recommended_version + # Recommend update if outside of backport window + unless add_recommendation_if_outside_backport_window(runner_version, suggestions) + # Recommend patch update if there's a newer release in a same minor branch as runner + add_recommended_runner_release_update(runner_version, suggestions) + end # Consider update if there's a newer release within the currently deployed GitLab version - available_version = available_runner_release(runner_version) - return { available: available_version } if available_version + add_available_runner_release(runner_version, suggestions) - { not_available: runner_version } - end + suggestions[runner_version] = :not_available if suggestions.empty? - private + suggestions + end - def recommended_runner_release_update(runner_version) + def add_recommended_runner_release_update(runner_version, suggestions) recommended_release = runner_releases_store.releases_by_minor[runner_version.without_patch] - return recommended_release if recommended_release && recommended_release > runner_version + return true if add_suggestion(suggestions, runner_version, recommended_release, :recommended) # Consider the edge case of pre-release runner versions that get registered, but are never published. 
# In this case, suggest the latest compatible runner version - latest_release = runner_releases_store.releases_by_minor.values.select { |v| v < gitlab_version }.max - latest_release if latest_release && latest_release > runner_version - end - - def available_runner_release(runner_version) - available_release = runner_releases_store.releases_by_minor[gitlab_version.without_patch] - available_release if available_release && available_release > runner_version + latest_release = runner_releases_store.releases_by_minor.values.select { |v| v < @gitlab_version }.max + add_suggestion(suggestions, runner_version, latest_release, :recommended) end - def gitlab_version - @gitlab_version ||= ::Gitlab::VersionInfo.parse(::Gitlab::VERSION, parse_suffix: true) - end - - def runner_releases_store - RunnerReleases.instance + def add_available_runner_release(runner_version, suggestions) + available_version = runner_releases_store.releases_by_minor[@gitlab_version.without_patch] + unless suggestions.include?(available_version) + add_suggestion(suggestions, runner_version, available_version, :available) + end end - def recommendation_if_outside_backport_window(runner_version) - return if runner_releases_store.releases.empty? - return if runner_version >= runner_releases_store.releases.last # return early if runner version is too new + def add_recommendation_if_outside_backport_window(runner_version, suggestions) + return false if runner_releases_store.releases.empty? 
+ return false if runner_version >= runner_releases_store.releases.last # return early if runner version is too new minor_releases_with_index = runner_releases_store.releases_by_minor.keys.each_with_index.to_h runner_minor_version_index = minor_releases_with_index[runner_version.without_patch] @@ -62,14 +75,15 @@ module Gitlab outside_window = minor_releases_with_index.count - runner_minor_version_index > 3 if outside_window - recommended_release = runner_releases_store.releases_by_minor[gitlab_version.without_patch] - - recommended_release if recommended_release && recommended_release > runner_version + recommended_version = runner_releases_store.releases_by_minor[@gitlab_version.without_patch] + return add_suggestion(suggestions, runner_version, recommended_version, :recommended) end else # If unknown runner version, then recommend the latest version for the GitLab instance - recommended_runner_release_update(gitlab_version) + return add_recommended_runner_release_update(@gitlab_version, suggestions) end + + false end end end diff --git a/lib/gitlab/ci/status/bridge/common.rb b/lib/gitlab/ci/status/bridge/common.rb index 263fd9d1052..d66d4b20bba 100644 --- a/lib/gitlab/ci/status/bridge/common.rb +++ b/lib/gitlab/ci/status/bridge/common.rb @@ -16,11 +16,7 @@ module Gitlab def details_path return unless can?(user, :read_pipeline, downstream_pipeline) - if Feature.enabled?(:ci_retry_downstream_pipeline, subject.project) - project_job_path(subject.project, subject) - else - project_pipeline_path(downstream_project, downstream_pipeline) - end + project_pipeline_path(downstream_project, downstream_pipeline) end def has_action? 
diff --git a/lib/gitlab/ci/templates/5-Minute-Production-App.gitlab-ci.yml b/lib/gitlab/ci/templates/5-Minute-Production-App.gitlab-ci.yml index 71f38ededd9..bb88bee9137 100644 --- a/lib/gitlab/ci/templates/5-Minute-Production-App.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/5-Minute-Production-App.gitlab-ci.yml @@ -39,7 +39,7 @@ cache: terraform_apply: stage: provision - image: registry.gitlab.com/gitlab-org/5-minute-production-app/deploy-template/stable + image: "$CI_TEMPLATE_REGISTRY_HOST/gitlab-org/5-minute-production-app/deploy-template/stable" extends: .needs_aws_vars resource_group: terraform before_script: @@ -53,7 +53,7 @@ terraform_apply: deploy: stage: deploy - image: registry.gitlab.com/gitlab-org/5-minute-production-app/deploy-template/stable + image: "$CI_TEMPLATE_REGISTRY_HOST/gitlab-org/5-minute-production-app/deploy-template/stable" extends: .needs_aws_vars resource_group: deploy before_script: @@ -74,7 +74,7 @@ terraform_destroy: variables: GIT_STRATEGY: none stage: destroy - image: registry.gitlab.com/gitlab-org/5-minute-production-app/deploy-template/stable + image: "$CI_TEMPLATE_REGISTRY_HOST/gitlab-org/5-minute-production-app/deploy-template/stable" before_script: - cp /*.tf . - cp /deploy.sh . 
diff --git a/lib/gitlab/ci/templates/Dart.gitlab-ci.yml b/lib/gitlab/ci/templates/Dart.gitlab-ci.yml index 35401e62fe2..4e011bb325d 100644 --- a/lib/gitlab/ci/templates/Dart.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Dart.gitlab-ci.yml @@ -6,25 +6,86 @@ # This specific template is located at: # https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Dart.gitlab-ci.yml -# https://hub.docker.com/r/google/dart -image: google/dart:2.8.4 +# https://hub.docker.com/_/dart +image: dart:2.17 variables: - # Use to learn more: - # pub run test --help + # To learn more go to https://dart.dev/tools/dart-test + # Or run `dart test --help` PUB_VARS: "--platform vm --timeout 30s --concurrency=6 --test-randomize-ordering-seed=random --reporter=expanded" +.use-pub-cache-bin: + # Define commands that need to be executed before each job. + before_script: + # Set PUB_CACHE either here or in the CI/CD Settings if you have multiple jobs that use dart commands. + # PUB_CACHE is used by the `dart pub` command, it needs to be set so package dependencies are stored at the project-level for CI/CD operations. + - export PUB_CACHE=".pub-cache" + - export PATH="$PATH:$HOME/$PUB_CACHE/bin" + +# Cache generated files and plugins between builds. +.upload-cache: + cache: + when: 'on_success' + paths: + - .pub-cache/bin/ + - .pub-cache/global_packages/ + - .pub-cache/hosted/ + - .dart_tool/ + - .packages + # Cache downloaded dependencies and plugins between builds. 
# To keep cache across branches add 'key: "$CI_JOB_NAME"' -cache: - paths: - - .pub-cache/global_packages +.download-cache: + cache: + paths: + - .dart_tool/ + - .packages + policy: pull + +install-dependencies: + stage: .pre + extends: + - .use-pub-cache-bin + - .upload-cache + script: + - dart pub get --no-precompile + +build: + stage: build + needs: + - install-dependencies + extends: + - .use-pub-cache-bin + - .upload-cache + script: + - dart pub get --offline --precompile -before_script: - - export PATH="$PATH:$HOME/.pub-cache/bin" - - pub get --no-precompile +unit-test: + stage: test + needs: + - build + extends: + - .use-pub-cache-bin + - .download-cache + script: + - dart test $PUB_VARS + +lint-test: + stage: test + needs: + - install-dependencies + extends: + - .use-pub-cache-bin + - .download-cache + script: + - dart analyze . -test: +format-test: stage: test + needs: + - install-dependencies + extends: + - .use-pub-cache-bin + - .download-cache script: - - pub run test $PUB_VARS + - dart format --set-exit-if-changed bin/ lib/ test/ diff --git a/lib/gitlab/ci/templates/Getting-Started.gitlab-ci.yml b/lib/gitlab/ci/templates/Getting-Started.gitlab-ci.yml index 464b81965f2..dc55277318b 100644 --- a/lib/gitlab/ci/templates/Getting-Started.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Getting-Started.gitlab-ci.yml @@ -42,6 +42,7 @@ lint-test-job: # This job also runs in the test stage. deploy-job: # This job runs in the deploy stage. stage: deploy # It only runs when *both* jobs in the test stage complete successfully. + environment: production script: - echo "Deploying application..." - echo "Application successfully deployed." 
diff --git a/lib/gitlab/ci/templates/Indeni.Cloudrail.gitlab-ci.yml b/lib/gitlab/ci/templates/Indeni.Cloudrail.gitlab-ci.yml index 7f33d048c1e..34988fcdcde 100644 --- a/lib/gitlab/ci/templates/Indeni.Cloudrail.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Indeni.Cloudrail.gitlab-ci.yml @@ -31,7 +31,7 @@ default: init_and_plan: stage: build - image: registry.gitlab.com/gitlab-org/terraform-images/releases/0.13 + image: "$CI_TEMPLATE_REGISTRY_HOST/gitlab-org/terraform-images/releases/0.13" rules: - if: $SAST_DISABLED when: never diff --git a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml index 8c63019d743..ce227bad19a 100644 --- a/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Build.gitlab-ci.yml @@ -3,7 +3,7 @@ variables: build: stage: build - image: 'registry.gitlab.com/gitlab-org/cluster-integration/auto-build-image:${AUTO_BUILD_IMAGE_VERSION}' + image: '${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-build-image:${AUTO_BUILD_IMAGE_VERSION}' variables: DOCKER_TLS_CERTDIR: '' services: diff --git a/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml index 8c63019d743..ce227bad19a 100644 --- a/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Build.latest.gitlab-ci.yml @@ -3,7 +3,7 @@ variables: build: stage: build - image: 'registry.gitlab.com/gitlab-org/cluster-integration/auto-build-image:${AUTO_BUILD_IMAGE_VERSION}' + image: '${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-build-image:${AUTO_BUILD_IMAGE_VERSION}' variables: DOCKER_TLS_CERTDIR: '' services: diff --git a/lib/gitlab/ci/templates/Jobs/CF-Provision.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/CF-Provision.gitlab-ci.yml index 11f8376f0b4..6e8cf15204a 100644 --- a/lib/gitlab/ci/templates/Jobs/CF-Provision.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/CF-Provision.gitlab-ci.yml 
@@ -2,7 +2,7 @@ stages: - provision cloud_formation: - image: 'registry.gitlab.com/gitlab-org/cloud-deploy/aws-cloudformation:latest' + image: '${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cloud-deploy/aws-cloudformation:latest' stage: provision script: - gl-cloudformation create-stack diff --git a/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml index 86e3ace84c5..e278539d214 100644 --- a/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Code-Quality.gitlab-ci.yml @@ -8,7 +8,7 @@ code_quality: variables: DOCKER_DRIVER: overlay2 DOCKER_TLS_CERTDIR: "" - CODE_QUALITY_IMAGE: "registry.gitlab.com/gitlab-org/ci-cd/codequality:0.85.29" + CODE_QUALITY_IMAGE: "$CI_TEMPLATE_REGISTRY_HOST/gitlab-org/ci-cd/codequality:0.85.29" needs: [] script: - export SOURCE_CODE=$PWD diff --git a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml index b41e92e3a56..f0ddc4b4916 100644 --- a/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/DAST-Default-Branch-Deploy.gitlab-ci.yml @@ -1,8 +1,8 @@ variables: - DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.30.0' + DAST_AUTO_DEPLOY_IMAGE_VERSION: 'v2.33.0' .dast-auto-deploy: - image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:${DAST_AUTO_DEPLOY_IMAGE_VERSION}" + image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${DAST_AUTO_DEPLOY_IMAGE_VERSION}" .common_rules: &common_rules - if: $CI_DEFAULT_BRANCH != $CI_COMMIT_REF_NAME @@ -57,7 +57,7 @@ stop_dast_environment: when: always .ecs_image: - image: 'registry.gitlab.com/gitlab-org/cloud-deploy/aws-ecs:latest' + image: '${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cloud-deploy/aws-ecs:latest' .ecs_rules: &ecs_rules - if: $AUTO_DEVOPS_PLATFORM_TARGET != "ECS" diff --git 
a/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.gitlab-ci.yml index a9d9c400a34..7cbc8e40b47 100644 --- a/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Dependency-Scanning.gitlab-ci.yml @@ -11,7 +11,7 @@ variables: # Setting this variable will affect all Security templates # (SAST, Dependency Scanning, ...) - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" DS_EXCLUDED_ANALYZERS: "" DS_EXCLUDED_PATHS: "spec, test, tests, tmp" DS_MAJOR_VERSION: 3 diff --git a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml index f9c0d4333ff..1a2a8b4edb4 100644 --- a/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Deploy.gitlab-ci.yml @@ -1,8 +1,8 @@ variables: - AUTO_DEPLOY_IMAGE_VERSION: 'v2.30.0' + AUTO_DEPLOY_IMAGE_VERSION: 'v2.33.0' .auto-deploy: - image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}" + image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}" dependencies: [] review: diff --git a/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml index 36f1b6981c4..cb8818357a2 100644 --- a/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Deploy.latest.gitlab-ci.yml @@ -1,8 +1,8 @@ variables: - AUTO_DEPLOY_IMAGE_VERSION: 'v2.30.0' + AUTO_DEPLOY_IMAGE_VERSION: 'v2.33.0' .auto-deploy: - image: "registry.gitlab.com/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}" + image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/auto-deploy-image:${AUTO_DEPLOY_IMAGE_VERSION}" dependencies: [] review: diff --git 
a/lib/gitlab/ci/templates/Jobs/Deploy/EC2.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy/EC2.gitlab-ci.yml index ab3bc511cba..8a349f751ea 100644 --- a/lib/gitlab/ci/templates/Jobs/Deploy/EC2.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Deploy/EC2.gitlab-ci.yml @@ -3,7 +3,7 @@ stages: - production .push-and-deploy: - image: 'registry.gitlab.com/gitlab-org/cloud-deploy/aws-ec2:latest' + image: '${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cloud-deploy/aws-ec2:latest' script: - gl-ec2 push-to-s3 - gl-ec2 deploy-to-ec2 diff --git a/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml index c2d31fd9669..43dc44312da 100644 --- a/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Deploy/ECS.gitlab-ci.yml @@ -7,9 +7,8 @@ # then result in potentially breaking your future pipelines. # # More about including CI templates: https://docs.gitlab.com/ee/ci/yaml/#includetemplate - .ecs_image: - image: 'registry.gitlab.com/gitlab-org/cloud-deploy/aws-ecs:latest' + image: '${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cloud-deploy/aws-ecs:latest' .deploy_to_ecs: extends: .ecs_image diff --git a/lib/gitlab/ci/templates/Jobs/Helm-2to3.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Helm-2to3.gitlab-ci.yml index d55c126eeb7..b7735068680 100644 --- a/lib/gitlab/ci/templates/Jobs/Helm-2to3.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Helm-2to3.gitlab-ci.yml @@ -3,9 +3,8 @@ # # To use, set the CI variable MIGRATE_HELM_2TO3 to "true". 
# For more details, go to https://docs.gitlab.com/ee/topics/autodevops/upgrading_auto_deploy_dependencies.html#helm-v3 - .helm-2to3-migrate: - image: registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/helm-2to3-2.17.0-3.5.3-kube-1.16.15-alpine-3.12 + image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/helm-install-image/releases/helm-2to3-2.17.0-3.5.3-kube-1.16.15-alpine-3.12" # NOTE: We use the deploy stage because: # - It exists in all versions of Auto DevOps. # - It is _empty_. @@ -54,7 +53,7 @@ done .helm-2to3-cleanup: - image: registry.gitlab.com/gitlab-org/cluster-integration/helm-install-image/releases/helm-2to3-2.17.0-3.5.3-kube-1.16.15-alpine-3.12 + image: "${CI_TEMPLATE_REGISTRY_HOST}/gitlab-org/cluster-integration/helm-install-image/releases/helm-2to3-2.17.0-3.5.3-kube-1.16.15-alpine-3.12" stage: cleanup environment: action: prepare diff --git a/lib/gitlab/ci/templates/Jobs/License-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/License-Scanning.gitlab-ci.yml index f7945b46a59..f8668699fe5 100644 --- a/lib/gitlab/ci/templates/Jobs/License-Scanning.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/License-Scanning.gitlab-ci.yml @@ -11,7 +11,7 @@ variables: # Setting this variable will affect all Security templates # (SAST, Dependency Scanning, ...) - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" LICENSE_MANAGEMENT_SETUP_CMD: '' # If needed, specify a command to setup your environment with a custom package manager. 
LICENSE_MANAGEMENT_VERSION: 4 diff --git a/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml index b6358eb0831..c195ecd8ee5 100644 --- a/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/SAST-IaC.gitlab-ci.yml @@ -6,7 +6,7 @@ variables: # Setting this variable will affect all Security templates # (SAST, Dependency Scanning, ...) - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" SAST_IMAGE_SUFFIX: "" SAST_EXCLUDED_PATHS: "spec, test, tests, tmp" @@ -31,7 +31,7 @@ kics-iac-sast: image: name: "$SAST_ANALYZER_IMAGE" variables: - SAST_ANALYZER_IMAGE_TAG: 2 + SAST_ANALYZER_IMAGE_TAG: 3 SAST_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/kics:$SAST_ANALYZER_IMAGE_TAG$SAST_IMAGE_SUFFIX" rules: - if: $SAST_DISABLED diff --git a/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml index b6358eb0831..0513aae00a8 100644 --- a/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/SAST-IaC.latest.gitlab-ci.yml @@ -6,7 +6,7 @@ variables: # Setting this variable will affect all Security templates # (SAST, Dependency Scanning, ...) 
- SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" SAST_IMAGE_SUFFIX: "" SAST_EXCLUDED_PATHS: "spec, test, tests, tmp" @@ -31,11 +31,14 @@ kics-iac-sast: image: name: "$SAST_ANALYZER_IMAGE" variables: - SAST_ANALYZER_IMAGE_TAG: 2 + SAST_ANALYZER_IMAGE_TAG: 3 SAST_ANALYZER_IMAGE: "$SECURE_ANALYZERS_PREFIX/kics:$SAST_ANALYZER_IMAGE_TAG$SAST_IMAGE_SUFFIX" rules: - if: $SAST_DISABLED when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /kics/ when: never - - if: $CI_COMMIT_BRANCH + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline. + when: never + - if: $CI_COMMIT_BRANCH # If there's no open merge request, add it to a *branch* pipeline instead. diff --git a/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml index be41553450c..dd164c00724 100644 --- a/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/SAST.gitlab-ci.yml @@ -6,7 +6,7 @@ variables: # Setting this variable will affect all Security templates # (SAST, Dependency Scanning, ...) - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" SAST_IMAGE_SUFFIX: "" SAST_EXCLUDED_ANALYZERS: "" diff --git a/lib/gitlab/ci/templates/Jobs/SAST.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/SAST.latest.gitlab-ci.yml index f8e6e152ab9..c6938920ea4 100644 --- a/lib/gitlab/ci/templates/Jobs/SAST.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/SAST.latest.gitlab-ci.yml @@ -6,7 +6,7 @@ variables: # Setting this variable will affect all Security templates # (SAST, Dependency Scanning, ...) 
- SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" SAST_IMAGE_SUFFIX: "" SAST_EXCLUDED_ANALYZERS: "" @@ -46,7 +46,7 @@ bandit-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /bandit/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/*.py' - if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline. @@ -67,7 +67,7 @@ brakeman-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /brakeman/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/*.rb' - '**/Gemfile' @@ -90,7 +90,7 @@ eslint-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /eslint/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/*.html' - '**/*.js' @@ -119,7 +119,7 @@ flawfinder-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /flawfinder/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/*.c' - '**/*.cc' @@ -151,7 +151,7 @@ kubesec-sast: - if: $SAST_EXCLUDED_ANALYZERS =~ /kubesec/ when: never # Add the job to merge request pipelines if there's an open merge request. 
- - if: $CI_MERGE_REQUEST_IID && + - if: $CI_PIPELINE_SOURCE == "merge_request_event" && $SCAN_KUBERNETES_MANIFESTS == 'true' - if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline. when: never @@ -171,7 +171,7 @@ gosec-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /gosec/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/*.go' - if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline. @@ -196,7 +196,7 @@ mobsf-android-sast: - if: $SAST_EXCLUDED_ANALYZERS =~ /mobsf/ when: never # Add the job to merge request pipelines if there's an open merge request. - - if: $CI_MERGE_REQUEST_IID && + - if: $CI_PIPELINE_SOURCE == "merge_request_event" && $SAST_EXPERIMENTAL_FEATURES == 'true' exists: - '**/*.apk' @@ -218,7 +218,7 @@ mobsf-ios-sast: - if: $SAST_EXCLUDED_ANALYZERS =~ /mobsf/ when: never # Add the job to merge request pipelines if there's an open merge request. - - if: $CI_MERGE_REQUEST_IID && + - if: $CI_PIPELINE_SOURCE == "merge_request_event" && $SAST_EXPERIMENTAL_FEATURES == 'true' exists: - '**/*.ipa' @@ -244,7 +244,7 @@ nodejs-scan-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /nodejs-scan/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/package.json' - if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline. 
@@ -265,7 +265,7 @@ phpcs-security-audit-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /phpcs-security-audit/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/*.php' - if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline. @@ -286,7 +286,7 @@ pmd-apex-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /pmd-apex/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/*.cls' - if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline. @@ -307,7 +307,7 @@ security-code-scan-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /security-code-scan/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/*.csproj' - '**/*.vbproj' @@ -331,7 +331,7 @@ semgrep-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /semgrep/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/*.py' - '**/*.js' @@ -366,7 +366,7 @@ sobelow-sast: when: never - if: $SAST_EXCLUDED_ANALYZERS =~ /sobelow/ when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. 
+ - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - 'mix.exs' - if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline. @@ -391,7 +391,7 @@ spotbugs-sast: when: never - if: $SAST_DISABLED when: never - - if: $CI_MERGE_REQUEST_IID # Add the job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. exists: - '**/*.groovy' - '**/*.java' diff --git a/lib/gitlab/ci/templates/Jobs/Secret-Detection.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Secret-Detection.gitlab-ci.yml index 3f18237a525..b7a9dbf7bc6 100644 --- a/lib/gitlab/ci/templates/Jobs/Secret-Detection.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Secret-Detection.gitlab-ci.yml @@ -5,7 +5,7 @@ # How to set: https://docs.gitlab.com/ee/ci/yaml/#variables variables: - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" SECRET_DETECTION_IMAGE_SUFFIX: "" SECRETS_ANALYZER_VERSION: "4" diff --git a/lib/gitlab/ci/templates/Jobs/Secret-Detection.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Jobs/Secret-Detection.latest.gitlab-ci.yml index e81e06d1a1d..e6eba6f6406 100644 --- a/lib/gitlab/ci/templates/Jobs/Secret-Detection.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Jobs/Secret-Detection.latest.gitlab-ci.yml @@ -5,8 +5,9 @@ # How to set: https://docs.gitlab.com/ee/ci/yaml/#variables variables: - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" SECRET_DETECTION_IMAGE_SUFFIX: "" + SECRETS_ANALYZER_VERSION: "4" SECRET_DETECTION_EXCLUDED_PATHS: "" @@ -28,7 +29,7 @@ secret_detection: rules: - if: $SECRET_DETECTION_DISABLED when: never - - if: $CI_MERGE_REQUEST_IID # Add the 
job to merge request pipelines if there's an open merge request. + - if: $CI_PIPELINE_SOURCE == "merge_request_event" # Add the job to merge request pipelines if there's an open merge request. - if: $CI_OPEN_MERGE_REQUESTS # Don't add it to a *branch* pipeline if it's already in a merge request pipeline. when: never - if: $CI_COMMIT_BRANCH # If there's no open merge request, add it to a *branch* pipeline instead. diff --git a/lib/gitlab/ci/templates/MATLAB.gitlab-ci.yml b/lib/gitlab/ci/templates/MATLAB.gitlab-ci.yml index 64a063388b2..30767e66649 100644 --- a/lib/gitlab/ci/templates/MATLAB.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/MATLAB.gitlab-ci.yml @@ -3,31 +3,45 @@ # This specific template is located at: # https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/MATLAB.gitlab-ci.yml -# Use this template to run MATLAB and Simulink as part of your CI/CD pipeline. The template has three jobs: +# Use this template to run MATLAB and Simulink as part of your CI/CD pipeline. The template includes three jobs: # - `command`: Run MATLAB scripts, functions, and statements. # - `test`: Run tests authored using the MATLAB unit testing framework or Simulink Test. # - `test_artifacts`: Run MATLAB and Simulink tests, and generate test and coverage artifacts. # +# The jobs in the template use the `matlab -batch` syntax to start MATLAB. The `-batch` option is supported +# in MATLAB R2019a and later. +# # You can copy and paste one or more jobs in this template into your `.gitlab-ci.yml` file. # You should not add this template to an existing `.gitlab-ci.yml` file by using the `include:` keyword. # -# - To run MATLAB and Simulink, MATLAB must be installed on the runner that will run the jobs. -# The runner will use the topmost MATLAB version on the system path. -# The build fails if the operating system cannot find MATLAB on the path. -# - The jobs in this template use the `matlab -batch` syntax to start MATLAB. 
The `-batch` option is supported -# in MATLAB R2019a and later. + +# Your runner must use the Docker executor to run MATLAB within a container. The [MATLAB Container on Docker Hub][1] +# lets you run your build using MATLAB R2020b or a later release. If your build requires additional toolboxes, use a +# custom MATLAB container instead. For more information on how to create and use a custom MATLAB container, +# see [Create a Custom MATLAB Container][2]. +# +# [1] https://www.mathworks.com/help/cloudcenter/ug/matlab-container-on-docker-hub.html +# [2] https://www.mathworks.com/help/cloudcenter/ug/create-a-custom-matlab-container.html + +# The jobs in this template incorporate the contents of a hidden `.matlab_defaults` job. You need to +# configure this job before running the `command`, `test`, and `test_artifacts` jobs. To configure the job: +# - Specify the name of the MATLAB container image you want to use. +# - Set the `MLM_LICENSE_FILE` environment variable using the port number and DNS address for your network license manager. +# +.matlab_defaults: + image: + name: mathworks/matlab:latest # Replace the value with the name of the MATLAB container image you want to use + entrypoint: [""] + variables: + MLM_LICENSE_FILE: 27000@MyLicenseServer # Replace the value with the port number and DNS address for your network license manager # The `command` job runs MATLAB scripts, functions, and statements. To use the job in your pipeline, # substitute `mycommand` with the code you want to run. # command: + extends: .matlab_defaults script: matlab -batch mycommand -# If the value of `mycommand` is the name of a MATLAB script or function, do not specify the file extension. -# For example, to run a script named `myscript.m` in the root of your repository, specify `mycommand` like this: -# -# "myscript" -# # If you specify more than one script, function, or statement, use a comma or semicolon to separate them. 
# For example, to run `myscript.m` in a folder named `myfolder` located in the root of the repository, # you can specify `mycommand` like this: @@ -36,51 +50,51 @@ command: # # MATLAB exits with exit code 0 if the specified script, function, or statement executes successfully without # error. Otherwise, MATLAB terminates with a nonzero exit code, which causes the job to fail. To have the -# job fail in certain conditions, use the [`assert`][1] or [`error`][2] functions. +# job fail in certain conditions, use the [`assert`][3] or [`error`][4] functions. # -# [1] https://www.mathworks.com/help/matlab/ref/assert.html -# [2] https://www.mathworks.com/help/matlab/ref/error.html +# [3] https://www.mathworks.com/help/matlab/ref/assert.html +# [4] https://www.mathworks.com/help/matlab/ref/error.html -# The `test` job runs the MATLAB and Simulink tests in your project. It calls the [`runtests`][3] function -# to run the tests and then the [`assertSuccess`][4] method to fail the job if any of the tests fail. +# The `test` job runs the MATLAB and Simulink tests in your project. It calls the [`runtests`][5] function +# to run the tests and then the [`assertSuccess`][6] method to fail the job if any of the tests fail. # test: + extends: .matlab_defaults script: matlab -batch "results = runtests('IncludeSubfolders',true), assertSuccess(results);" -# By default, the job includes any files in your [MATLAB Project][5] that have a `Test` label. If your repository +# By default, the job includes any files in your [MATLAB Project][7] that have a `Test` label. If your repository # does not have a MATLAB project, then the job includes all tests in the root of your repository or in any of # its subfolders. 
# -# [3] https://www.mathworks.com/help/matlab/ref/runtests.html -# [4] https://www.mathworks.com/help/matlab/ref/matlab.unittest.testresult.assertsuccess.html -# [5] https://www.mathworks.com/help/matlab/projects.html +# [5] https://www.mathworks.com/help/matlab/ref/runtests.html +# [6] https://www.mathworks.com/help/matlab/ref/matlab.unittest.testresult.assertsuccess.html +# [7] https://www.mathworks.com/help/matlab/projects.html # The `test_artifacts` job runs your tests and additionally generates test and coverage artifacts. -# It uses the plugin classes in the [`matlab.unittest.plugins`][6] package to generate a JUnit test results +# It uses the plugin classes in the [`matlab.unittest.plugins`][8] package to generate a JUnit test results # report and a Cobertura code coverage report. Like the `test` job, this job runs all the tests in your # project and fails the build if any of the tests fail. # test_artifacts: + extends: .matlab_defaults script: | - matlab -batch " - import matlab.unittest.TestRunner - import matlab.unittest.Verbosity - import matlab.unittest.plugins.CodeCoveragePlugin - import matlab.unittest.plugins.XMLPlugin - import matlab.unittest.plugins.codecoverage.CoberturaFormat - - suite = testsuite(pwd,'IncludeSubfolders',true); - - [~,~] = mkdir('artifacts'); - - runner = TestRunner.withTextOutput('OutputDetail',Verbosity.Detailed); - runner.addPlugin(XMLPlugin.producingJUnitFormat('artifacts/results.xml')) - runner.addPlugin(CodeCoveragePlugin.forFolder(pwd,'IncludingSubfolders',true, ... 
- 'Producing',CoberturaFormat('artifacts/cobertura.xml'))) - - results = runner.run(suite) - assertSuccess(results);" - + cat <<- 'BLOCK' > runAllTests.m + import matlab.unittest.TestRunner + import matlab.unittest.Verbosity + import matlab.unittest.plugins.CodeCoveragePlugin + import matlab.unittest.plugins.XMLPlugin + import matlab.unittest.plugins.codecoverage.CoberturaFormat + suite = testsuite(pwd,'IncludeSubfolders',true); + [~,~] = mkdir('artifacts') + runner = TestRunner.withTextOutput('OutputDetail',Verbosity.Detailed); + runner.addPlugin(XMLPlugin.producingJUnitFormat('artifacts/results.xml')) + % Replace `pwd` with the location of the folder containing source code + runner.addPlugin(CodeCoveragePlugin.forFolder(pwd,'IncludingSubfolders',true, ... + 'Producing',CoberturaFormat('artifacts/cobertura.xml'))) + results = runner.run(suite) + assertSuccess(results); + BLOCK + matlab -batch runAllTests artifacts: reports: junit: "./artifacts/results.xml" @@ -92,7 +106,7 @@ test_artifacts: # You can modify the contents of the `test_artifacts` job depending on your goals. For more # information on how to customize the test runner and generate various test and coverage artifacts, -# see [Generate Artifacts Using MATLAB Unit Test Plugins][7]. +# see [Generate Artifacts Using MATLAB Unit Test Plugins][9]. 
# -# [6] https://www.mathworks.com/help/matlab/ref/matlab.unittest.plugins-package.html -# [7] https://www.mathworks.com/help/matlab/matlab_prog/generate-artifacts-using-matlab-unit-test-plugins.html +# [8] https://www.mathworks.com/help/matlab/ref/matlab.unittest.plugins-package.html +# [9] https://www.mathworks.com/help/matlab/matlab_prog/generate-artifacts-using-matlab-unit-test-plugins.html diff --git a/lib/gitlab/ci/templates/Pages/Hugo.gitlab-ci.yml b/lib/gitlab/ci/templates/Pages/Hugo.gitlab-ci.yml index cfc4a1d904a..591eebf9cd6 100644 --- a/lib/gitlab/ci/templates/Pages/Hugo.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Pages/Hugo.gitlab-ci.yml @@ -6,7 +6,7 @@ --- # All available Hugo versions are listed here: # https://gitlab.com/pages/hugo/container_registry -image: registry.gitlab.com/pages/hugo:latest +image: "${CI_TEMPLATE_REGISTRY_HOST}/pages/hugo:latest" variables: GIT_SUBMODULE_STRATEGY: recursive diff --git a/lib/gitlab/ci/templates/Security/API-Fuzzing.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/API-Fuzzing.gitlab-ci.yml index 2fd5b409f5e..cdfa4556769 100644 --- a/lib/gitlab/ci/templates/Security/API-Fuzzing.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/API-Fuzzing.gitlab-ci.yml @@ -24,7 +24,7 @@ variables: # Setting this variable affects all Security templates # (SAST, Dependency Scanning, ...) - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" # FUZZAPI_VERSION: "2" FUZZAPI_IMAGE_SUFFIX: "" diff --git a/lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml index 450969fcdab..8d6c191edc4 100644 --- a/lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/API-Fuzzing.latest.gitlab-ci.yml @@ -24,7 +24,7 @@ variables: # Setting this variable affects all Security templates # (SAST, Dependency Scanning, ...) 
- SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" # FUZZAPI_VERSION: "2" FUZZAPI_IMAGE_SUFFIX: "" diff --git a/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml index bec269e2933..3d7883fb87a 100644 --- a/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/Container-Scanning.gitlab-ci.yml @@ -22,7 +22,7 @@ # List of available variables: https://docs.gitlab.com/ee/user/application_security/container_scanning/#available-variables variables: - CS_ANALYZER_IMAGE: registry.gitlab.com/security-products/container-scanning:5 + CS_ANALYZER_IMAGE: "$CI_TEMPLATE_REGISTRY_HOST/security-products/container-scanning:5" container_scanning: image: "$CS_ANALYZER_IMAGE$CS_IMAGE_SUFFIX" diff --git a/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml index 893098d33c4..1b33596baa0 100644 --- a/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/DAST-API.gitlab-ci.yml @@ -24,7 +24,7 @@ variables: # Setting this variable affects all Security templates # (SAST, Dependency Scanning, ...) - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" # DAST_API_VERSION: "2" DAST_API_IMAGE_SUFFIX: "" diff --git a/lib/gitlab/ci/templates/Security/DAST-API.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST-API.latest.gitlab-ci.yml index 3acc3b06031..8aabf20c5df 100644 --- a/lib/gitlab/ci/templates/Security/DAST-API.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/DAST-API.latest.gitlab-ci.yml @@ -24,7 +24,7 @@ variables: # Setting this variable affects all Security templates # (SAST, Dependency Scanning, ...) 
- SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" # DAST_API_VERSION: "2" DAST_API_IMAGE_SUFFIX: "" diff --git a/lib/gitlab/ci/templates/Security/DAST-On-Demand-API-Scan.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST-On-Demand-API-Scan.gitlab-ci.yml index 4a72f5e72b1..1bd527a6ec0 100644 --- a/lib/gitlab/ci/templates/Security/DAST-On-Demand-API-Scan.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/DAST-On-Demand-API-Scan.gitlab-ci.yml @@ -10,7 +10,7 @@ stages: - dast variables: - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" DAST_API_VERSION: "2" DAST_API_IMAGE_SUFFIX: "" DAST_API_IMAGE: api-security diff --git a/lib/gitlab/ci/templates/Security/DAST-On-Demand-Scan.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST-On-Demand-Scan.gitlab-ci.yml index c71a1b1873a..701e08ba56d 100644 --- a/lib/gitlab/ci/templates/Security/DAST-On-Demand-Scan.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/DAST-On-Demand-Scan.gitlab-ci.yml @@ -13,7 +13,7 @@ variables: DAST_VERSION: 3 # Setting this variable will affect all Security templates # (SAST, Dependency Scanning, ...) 
- SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" dast: stage: dast diff --git a/lib/gitlab/ci/templates/Security/DAST-Runner-Validation.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST-Runner-Validation.gitlab-ci.yml index d27a08db181..5b6af37977e 100644 --- a/lib/gitlab/ci/templates/Security/DAST-Runner-Validation.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/DAST-Runner-Validation.gitlab-ci.yml @@ -15,7 +15,7 @@ variables: validation: stage: dast image: - name: "registry.gitlab.com/security-products/dast-runner-validation:$DAST_RUNNER_VALIDATION_VERSION" + name: "$CI_TEMPLATE_REGISTRY_HOST/security-products/dast-runner-validation:$DAST_RUNNER_VALIDATION_VERSION" variables: GIT_STRATEGY: none allow_failure: false diff --git a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml index 3bc44fe5e1b..40060e96dff 100644 --- a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml @@ -25,7 +25,7 @@ variables: DAST_VERSION: 3 # Setting this variable will affect all Security templates # (SAST, Dependency Scanning, ...) - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" dast: stage: dast diff --git a/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml index 10549b56856..9d3b1f4316e 100644 --- a/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/DAST.latest.gitlab-ci.yml @@ -25,7 +25,7 @@ variables: DAST_VERSION: 3 # Setting this variable will affect all Security templates # (SAST, Dependency Scanning, ...) 
- SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" dast: stage: dast @@ -39,6 +39,8 @@ dast: - if [ -z "$DAST_WEBSITE$DAST_API_SPECIFICATION" ]; then echo "Either DAST_WEBSITE or DAST_API_SPECIFICATION must be set. See https://docs.gitlab.com/ee/user/application_security/dast/#configuration for more details." && exit 1; fi - /analyze artifacts: + paths: + - dast_artifacts/* reports: dast: gl-dast-report.json rules: @@ -55,3 +57,7 @@ dast: $GITLAB_FEATURES =~ /\bdast\b/ - if: $CI_COMMIT_BRANCH && $GITLAB_FEATURES =~ /\bdast\b/ + after_script: + # Remove any debug.log files because they might contain secrets. + - rm -f /zap/wrk/**/debug.log + - cp -r /zap/wrk dast_artifacts diff --git a/lib/gitlab/ci/templates/Security/Fortify-FoD-sast.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Fortify-FoD-sast.gitlab-ci.yml new file mode 100644 index 00000000000..acd532132f4 --- /dev/null +++ b/lib/gitlab/ci/templates/Security/Fortify-FoD-sast.gitlab-ci.yml @@ -0,0 +1,52 @@ +# This template is provided and maintained by Fortify, an official Technology Partner with GitLab. +# You can copy and paste this template into a new `.gitlab-ci.yml` file. +# You should not add this template to an existing `.gitlab-ci.yml` file by using the `include:` keyword. + +################################################################################################################################################ +# Fortify lets you build secure software fast with an appsec platform that automates testing throughout the DevSecOps pipeline. Fortify static,# +# dynamic, interactive, and runtime security testing is available on premises or as a service. To learn more about Fortify, start a free trial # +# or contact our sales team, visit microfocus.com/appsecurity. 
# +# # +# Use this pipeline template as a basis for integrating Fortify on Demand Static Application Security Testing(SAST) into your GitLab pipelines.# +# This template demonstrates the steps to prepare the code+dependencies and initiate a scan. As an option, it also supports waiting for the # +# SAST scan to complete and optionally failing the job. Software Composition Analysis can also be performed in conjunction with the SAST    # +# scan if that service has been purchased. Users should review inputs and environment variables below to configure scanning for an existing # +# application in your Fortify on Demand tenant. Additional information is available in the comments throughout the template and the Fortify on # +# Demand, FoD Uploader and ScanCentral Client product documentation. If you need additional assistance with configuration, feel free to create # +# a help ticket in the Fortify on Demand portal. # +################################################################################################################################################ + +fortify_fod_sast: + image: fortifydocker/fortify-ci-tools:3-jdk-8 + variables: + # Update/override PACKAGE_OPTS based on the ScanCentral Client documentation for your project's included tech stack(s). Helpful hints: + # ScanCentral Client will download dependencies for maven (-bt mvn) and gradle (-bt gradle). + # The current fortify-ci-tools image is Linux only at this time. Msbuild integration is not currently supported. + # ScanCentral has additional options that should be set for PHP and Python projects. + # For other build tools (-bt none), add your build commands to download necessary dependencies and prepare according to Fortify on Demand Packaging documentation. + # ScanCentral Client documentation is located at https://www.microfocus.com/documentation/fortify-software-security-center/ + PACKAGE_OPTS: "-bt mvn" + + # Update/override the FoDUploader environment variables as needed.
For more information on FoDUploader commands, see https://github.com/fod-dev/fod-uploader-java. Helpful hints: + # Credentials (FOD_USERNAME, FOD_PAT, FOD_TENANT) are expected as GitLab CICD Variables in the template (masking recommended). + # Static scan settings should be configured in Fortify on Demand portal (Automated Audit preference strongly recommended). + # FOD_RELEASE_ID is expected as a GitLab CICD Variable. + # FOD_UPLOADER_OPTS can be adjusted to wait for scan completion/pull results (-I 1) and control whether to fail the job (-apf). + FOD_URL: "https://ams.fortify.com" + FOD_API_URL: "https://api.ams.fortify.com/" + FOD_UPLOADER_OPTS: "-ep 2 -pp 0" + FOD_NOTES: "Triggered by Gitlab Pipeline IID $CI_PIPELINE_IID: $CI_PIPELINE_URL" + + script: + # Package source code and dependencies using Fortify ScanCentral client + - 'scancentral package $PACKAGE_OPTS -o package.zip' + # Start Fortify on Demand SAST scan + - 'FoDUpload -z package.zip -aurl $FOD_API_URL -purl $FOD_URL -rid "$FOD_RELEASE" -tc "$FOD_TENANT" -uc "$FOD_USERNAME" "$FOD_PAT" $FOD_UPLOADER_OPTS -I 1 -n "$FOD_NOTES"' + # Generate GitLab reports + - 'FortifyVulnerabilityExporter FoDToGitLabSAST --fod.baseUrl=$FOD_URL --fod.tenant="$FOD_TENANT" --fod.userName="$FOD_USERNAME" --fod.password="$FOD_PAT" --fod.release.id=$FOD_RELEASE' + # Change to false to fail the entire pipeline if the scan fails and/or the result of a scan causes security policy failure (see "-apf" option in FoDUploader documentation) + allow_failure: true + # Report SAST vulnerabilities back to GitLab + artifacts: + reports: + sast: gl-fortify-sast.json diff --git a/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml index c414e70bfa3..fd04c86e6c7 100644 --- a/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Security/Secure-Binaries.gitlab-ci.yml @@ -16,7 +16,7 @@ variables: # Setting this variable will 
affect all Security templates # (SAST, Dependency Scanning, ...) - SECURE_ANALYZERS_PREFIX: "registry.gitlab.com/security-products" + SECURE_ANALYZERS_PREFIX: "$CI_TEMPLATE_REGISTRY_HOST/security-products" SECURE_BINARIES_ANALYZERS: >- bandit, brakeman, gosec, spotbugs, flawfinder, phpcs-security-audit, security-code-scan, nodejs-scan, eslint, secrets, sobelow, pmd-apex, kics, kubesec, semgrep, gemnasium, gemnasium-maven, gemnasium-python, license-finder, @@ -246,7 +246,7 @@ dast-runner-validation: extends: .download_images variables: SECURE_BINARIES_ANALYZER_VERSION: "1" - SECURE_BINARIES_IMAGE: "registry.gitlab.com/security-products/${CI_JOB_NAME}:${SECURE_BINARIES_ANALYZER_VERSION}" + SECURE_BINARIES_IMAGE: "${CI_TEMPLATE_REGISTRY_HOST}/security-products/${CI_JOB_NAME}:${SECURE_BINARIES_ANALYZER_VERSION}" only: variables: - $SECURE_BINARIES_DOWNLOAD_IMAGES == "true" && diff --git a/lib/gitlab/ci/templates/Terraform/Base.gitlab-ci.yml b/lib/gitlab/ci/templates/Terraform/Base.gitlab-ci.yml index 6f9a9c5133c..3a956ebfc49 100644 --- a/lib/gitlab/ci/templates/Terraform/Base.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Terraform/Base.gitlab-ci.yml @@ -9,7 +9,7 @@ # There is a more opinionated template which we suggest the users to abide, # which is the lib/gitlab/ci/templates/Terraform.gitlab-ci.yml image: - name: registry.gitlab.com/gitlab-org/terraform-images/releases/terraform:1.1.9 + name: "$CI_TEMPLATE_REGISTRY_HOST/gitlab-org/terraform-images/releases/terraform:1.1.9" variables: TF_ROOT: ${CI_PROJECT_DIR} # The relative path to the root directory of the Terraform project diff --git a/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml b/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml index 9ba009a5bca..4579f31d7ac 100644 --- a/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Terraform/Base.latest.gitlab-ci.yml @@ -10,7 +10,7 @@ # which is the lib/gitlab/ci/templates/Terraform.latest.gitlab-ci.yml image: 
- name: registry.gitlab.com/gitlab-org/terraform-images/stable:latest + name: "$CI_TEMPLATE_REGISTRY_HOST/gitlab-org/terraform-images/stable:latest" variables: TF_ROOT: ${CI_PROJECT_DIR} # The relative path to the root directory of the Terraform project diff --git a/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml b/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml index 2b5e86f4066..488b035d189 100644 --- a/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml +++ b/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml @@ -4,7 +4,6 @@ # https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Verify/Accessibility.gitlab-ci.yml # Read more about the feature here: https://docs.gitlab.com/ee/user/project/merge_requests/accessibility_testing.html - stages: - build - test @@ -13,7 +12,7 @@ stages: a11y: stage: accessibility - image: registry.gitlab.com/gitlab-org/ci-cd/accessibility:6.2.3 + image: "$CI_TEMPLATE_REGISTRY_HOST/gitlab-org/ci-cd/accessibility:6.2.3" script: - /gitlab-accessibility.sh "$a11y_urls" allow_failure: true diff --git a/lib/gitlab/ci/trace/chunked_io.rb b/lib/gitlab/ci/trace/chunked_io.rb index 9f24ba99201..32f64948635 100644 --- a/lib/gitlab/ci/trace/chunked_io.rb +++ b/lib/gitlab/ci/trace/chunked_io.rb @@ -22,7 +22,7 @@ module Gitlab @chunks_cache = [] @tell = 0 @size = calculate_size - yield self if block_given? 
+ yield self if block end def close diff --git a/lib/gitlab/ci/variables/collection.rb b/lib/gitlab/ci/variables/collection.rb index a00c1da97ea..52673d03e69 100644 --- a/lib/gitlab/ci/variables/collection.rb +++ b/lib/gitlab/ci/variables/collection.rb @@ -72,24 +72,32 @@ module Gitlab Collection.new(@variables.reject(&block)) end - def expand_value(value, keep_undefined: false) + def expand_value(value, keep_undefined: false, expand_file_vars: true) value.gsub(Item::VARIABLES_REGEXP) do - match = Regexp.last_match - if match[:key] - # we matched variable - if variable = self[match[:key]] - variable.value - elsif keep_undefined - match[0] - end + match = Regexp.last_match # it is either a valid variable definition or a ($$ / %%) + full_match = match[0] + variable_name = match[:key] + + next full_match unless variable_name # it is a ($$ / %%), so we don't touch it + + # now we know that it is a valid variable definition: $VARIABLE_NAME / %VARIABLE_NAME / ${VARIABLE_NAME} + + # we are trying to find a variable with key VARIABLE_NAME + variable = self[variable_name] + + if variable # VARIABLE_NAME is an existing variable + next variable.value unless variable.file? + + expand_file_vars ? variable.value : full_match + elsif keep_undefined + full_match # we do not touch the variable definition else - # we escape sequence - match[0] + nil # we remove the variable definition end end end - def sort_and_expand_all(keep_undefined: false) + def sort_and_expand_all(keep_undefined: false, expand_file_vars: true) sorted = Sort.new(self) return self.class.new(self, sorted.errors) unless sorted.valid? 
@@ -103,7 +111,8 @@ module Gitlab # expand variables as they are added variable = item.to_runner_variable - variable[:value] = new_collection.expand_value(variable[:value], keep_undefined: keep_undefined) + variable[:value] = new_collection.expand_value(variable[:value], keep_undefined: keep_undefined, + expand_file_vars: expand_file_vars) new_collection.append(variable) end diff --git a/lib/gitlab/ci/variables/collection/item.rb b/lib/gitlab/ci/variables/collection/item.rb index 0217e6129ca..ea2aa8f2db8 100644 --- a/lib/gitlab/ci/variables/collection/item.rb +++ b/lib/gitlab/ci/variables/collection/item.rb @@ -25,6 +25,10 @@ module Gitlab @variable.fetch(:raw) end + def file? + @variable.fetch(:file) + end + def [](key) @variable.fetch(key) end diff --git a/lib/gitlab/ci/variables/helpers.rb b/lib/gitlab/ci/variables/helpers.rb index 3a62f01e2e3..300b2708e6d 100644 --- a/lib/gitlab/ci/variables/helpers.rb +++ b/lib/gitlab/ci/variables/helpers.rb @@ -6,24 +6,22 @@ module Gitlab module Helpers class << self def merge_variables(current_vars, new_vars) - current_vars = transform_from_yaml_variables(current_vars) - new_vars = transform_from_yaml_variables(new_vars) + return current_vars if new_vars.blank? 
- transform_to_yaml_variables( - current_vars.merge(new_vars) - ) - end + current_vars = transform_to_array(current_vars) if current_vars.is_a?(Hash) + new_vars = transform_to_array(new_vars) if new_vars.is_a?(Hash) - def transform_to_yaml_variables(vars) - vars.to_h.map do |key, value| - { key: key.to_s, value: value, public: true } - end + (new_vars + current_vars).uniq { |var| var[:key] } end - def transform_from_yaml_variables(vars) - return vars.stringify_keys if vars.is_a?(Hash) - - vars.to_a.to_h { |var| [var[:key].to_s, var[:value]] } + def transform_to_array(vars) + vars.to_h.map do |key, data| + if data.is_a?(Hash) + { key: key.to_s, **data.except(:key) } + else + { key: key.to_s, value: data } + end + end end def inherit_yaml_variables(from:, to:, inheritance:) @@ -35,7 +33,7 @@ module Gitlab def apply_inheritance(variables, inheritance) case inheritance when true then variables - when false then {} + when false then [] when Array then variables.select { |var| inheritance.include?(var[:key]) } end end diff --git a/lib/gitlab/ci/yaml_processor/result.rb b/lib/gitlab/ci/yaml_processor/result.rb index 576fb509d47..f203f88442d 100644 --- a/lib/gitlab/ci/yaml_processor/result.rb +++ b/lib/gitlab/ci/yaml_processor/result.rb @@ -39,11 +39,11 @@ module Gitlab end def workflow_rules - @workflow_rules ||= hash_config.dig(:workflow, :rules) + @workflow_rules ||= @ci_config.workflow_rules end def root_variables - @root_variables ||= transform_to_yaml_variables(variables) + @root_variables ||= transform_to_array(variables) end def jobs @@ -70,7 +70,7 @@ module Gitlab environment: job[:environment_name], coverage_regex: job[:coverage], # yaml_variables is calculated with using job_variables in Seed::Build - job_variables: transform_to_yaml_variables(job[:job_variables]), + job_variables: transform_to_array(job[:job_variables]), root_variables_inheritance: job[:root_variables_inheritance], needs_attributes: job.dig(:needs, :job), interruptible: job[:interruptible], @@ 
-114,7 +114,7 @@ module Gitlab Gitlab::Ci::Variables::Helpers.inherit_yaml_variables( from: root_variables, - to: transform_to_yaml_variables(job[:job_variables]), + to: job[:job_variables], inheritance: job.fetch(:root_variables_inheritance, true) ) end @@ -133,16 +133,12 @@ module Gitlab @variables ||= @ci_config.variables end - def hash_config - @hash_config ||= @ci_config.to_hash - end - def release(job) job[:release] end - def transform_to_yaml_variables(variables) - ::Gitlab::Ci::Variables::Helpers.transform_to_yaml_variables(variables) + def transform_to_array(variables) + ::Gitlab::Ci::Variables::Helpers.transform_to_array(variables) end end end diff --git a/lib/gitlab/cleanup/project_uploads.rb b/lib/gitlab/cleanup/project_uploads.rb index ed4b363416c..7f24b2f78b0 100644 --- a/lib/gitlab/cleanup/project_uploads.rb +++ b/lib/gitlab/cleanup/project_uploads.rb @@ -56,7 +56,7 @@ module Gitlab # rubocop: enable CodeReuse/ActiveRecord def move_to_lost_and_found(path, dry_run) - new_path = path.sub(/\A#{ProjectUploadFileFinder::ABSOLUTE_UPLOAD_DIR}/, LOST_AND_FOUND) + new_path = path.sub(/\A#{ProjectUploadFileFinder::ABSOLUTE_UPLOAD_DIR}/o, LOST_AND_FOUND) move(path, new_path, 'move to lost and found', dry_run) end diff --git a/lib/gitlab/contributions_calendar.rb b/lib/gitlab/contributions_calendar.rb index 4d289a59a6a..a45380aca6c 100644 --- a/lib/gitlab/contributions_calendar.rb +++ b/lib/gitlab/contributions_calendar.rb @@ -43,7 +43,7 @@ module Gitlab .group(:date) .map(&:attributes) - @activity_dates = events.each_with_object(Hash.new {|h, k| h[k] = 0 }) do |event, activities| + @activity_dates = events.each_with_object(Hash.new { |h, k| h[k] = 0 }) do |event, activities| activities[event["date"]] += event["num_events"] end end diff --git a/lib/gitlab/current_settings.rb b/lib/gitlab/current_settings.rb index 8ef4977177a..5c4899da11f 100644 --- a/lib/gitlab/current_settings.rb +++ b/lib/gitlab/current_settings.rb @@ -84,7 +84,11 @@ module Gitlab def 
connect_to_db? # When the DBMS is not available, an exception (e.g. PG::ConnectionBad) is raised - active_db_connection = ::ApplicationSetting.connection.active? rescue false + active_db_connection = begin + ::ApplicationSetting.connection.active? + rescue StandardError + false + end active_db_connection && ApplicationSetting.database.cached_table_exists? diff --git a/lib/gitlab/daemon.rb b/lib/gitlab/daemon.rb index 04d13778499..49828e54d7e 100644 --- a/lib/gitlab/daemon.rb +++ b/lib/gitlab/daemon.rb @@ -1,6 +1,7 @@ # frozen_string_literal: true module Gitlab + # DEPRECATED. Use Gitlab::BackgroundTask for new code instead. class Daemon # Options: # - recreate: We usually only allow a single instance per process to exist; diff --git a/lib/gitlab/data_builder/issuable.rb b/lib/gitlab/data_builder/issuable.rb index d12537c4874..9a7b4d0e2aa 100644 --- a/lib/gitlab/data_builder/issuable.rb +++ b/lib/gitlab/data_builder/issuable.rb @@ -26,6 +26,10 @@ module Gitlab hook_data[:assignees] = issuable.assignees.map(&:hook_attrs) if issuable.assignees.any? + if issuable.allows_reviewers? && issuable.reviewers.any? 
+ hook_data[:reviewers] = issuable.reviewers.map(&:hook_attrs) + end + hook_data end diff --git a/lib/gitlab/database/async_indexes.rb b/lib/gitlab/database/async_indexes.rb index d89d5238356..6f301a66803 100644 --- a/lib/gitlab/database/async_indexes.rb +++ b/lib/gitlab/database/async_indexes.rb @@ -6,10 +6,16 @@ module Gitlab DEFAULT_INDEXES_PER_INVOCATION = 2 def self.create_pending_indexes!(how_many: DEFAULT_INDEXES_PER_INVOCATION) - PostgresAsyncIndex.order(:id).limit(how_many).each do |async_index| + PostgresAsyncIndex.to_create.order(:id).limit(how_many).each do |async_index| IndexCreator.new(async_index).perform end end + + def self.drop_pending_indexes!(how_many: DEFAULT_INDEXES_PER_INVOCATION) + PostgresAsyncIndex.to_drop.order(:id).limit(how_many).each do |async_index| + IndexDestructor.new(async_index).perform + end + end end end end diff --git a/lib/gitlab/database/async_indexes/index_destructor.rb b/lib/gitlab/database/async_indexes/index_destructor.rb new file mode 100644 index 00000000000..fe05872b87a --- /dev/null +++ b/lib/gitlab/database/async_indexes/index_destructor.rb @@ -0,0 +1,66 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module AsyncIndexes + class IndexDestructor + include ExclusiveLeaseGuard + + TIMEOUT_PER_ACTION = 1.day + + def initialize(async_index) + @async_index = async_index + end + + def perform + try_obtain_lease do + if !index_exists? 
+ log_index_info('Skipping dropping as the index does not exist') + else + log_index_info('Dropping async index') + + retries = Gitlab::Database::WithLockRetriesOutsideTransaction.new( + connection: connection, + timing_configuration: Gitlab::Database::Reindexing::REMOVE_INDEX_RETRY_CONFIG, + klass: self.class, + logger: Gitlab::AppLogger + ) + + retries.run(raise_on_exhaustion: false) do + connection.execute(async_index.definition) + end + + log_index_info('Finished dropping async index') + end + + async_index.destroy + end + end + + private + + attr_reader :async_index + + def index_exists? + connection.indexes(async_index.table_name).any? { |index| index.name == async_index.name } + end + + def connection + @connection ||= async_index.connection + end + + def lease_timeout + TIMEOUT_PER_ACTION + end + + def lease_key + [super, async_index.connection_db_config.name].join('/') + end + + def log_index_info(message) + Gitlab::AppLogger.info(message: message, table_name: async_index.table_name, index_name: async_index.name) + end + end + end + end +end diff --git a/lib/gitlab/database/async_indexes/migration_helpers.rb b/lib/gitlab/database/async_indexes/migration_helpers.rb index e9846dd4e85..c8f6761534c 100644 --- a/lib/gitlab/database/async_indexes/migration_helpers.rb +++ b/lib/gitlab/database/async_indexes/migration_helpers.rb @@ -77,6 +77,38 @@ module Gitlab async_index end + # Prepares an index for asynchronous destruction. + # + # Stores the index information in the postgres_async_indexes table to be removed later. The + # index will be always be removed CONCURRENTLY, so that option does not need to be given. + # + # If the requested index has already been removed, it is not stored in the table for + # asynchronous destruction. + def prepare_async_index_removal(table_name, column_name, options = {}) + index_name = options.fetch(:name) + raise 'prepare_async_index_removal must get an index name defined' if index_name.blank? 
+ + unless index_exists?(table_name, column_name, **options) + Gitlab::AppLogger.warn "Index not removed because it does not exist (this may be due to an aborted migration or similar): table_name: #{table_name}, index_name: #{index_name}" + return + end + + definition = "DROP INDEX CONCURRENTLY #{quote_column_name(index_name)}" + + async_index = PostgresAsyncIndex.find_or_create_by!(name: index_name) do |rec| + rec.table_name = table_name + rec.definition = definition + end + + Gitlab::AppLogger.info( + message: 'Prepared index for async destruction', + table_name: async_index.table_name, + index_name: async_index.name + ) + + async_index + end + def async_index_creation_available? connection.table_exists?(:postgres_async_indexes) end diff --git a/lib/gitlab/database/async_indexes/postgres_async_index.rb b/lib/gitlab/database/async_indexes/postgres_async_index.rb index 6cb40729061..dc932482d40 100644 --- a/lib/gitlab/database/async_indexes/postgres_async_index.rb +++ b/lib/gitlab/database/async_indexes/postgres_async_index.rb @@ -13,6 +13,9 @@ module Gitlab validates :table_name, presence: true, length: { maximum: MAX_IDENTIFIER_LENGTH } validates :definition, presence: true, length: { maximum: MAX_DEFINITION_LENGTH } + scope :to_create, -> { where("definition ILIKE 'CREATE%'") } + scope :to_drop, -> { where("definition ILIKE 'DROP%'") } + def to_s definition end diff --git a/lib/gitlab/database/background_migration/batched_job.rb b/lib/gitlab/database/background_migration/batched_job.rb index 72aa1cfe00b..81898a59da7 100644 --- a/lib/gitlab/database/background_migration/batched_job.rb +++ b/lib/gitlab/database/background_migration/batched_job.rb @@ -112,7 +112,7 @@ module Gitlab end def can_split?(exception) - attempts >= MAX_ATTEMPTS && TIMEOUT_EXCEPTIONS.include?(exception&.class) && batch_size > sub_batch_size + attempts >= MAX_ATTEMPTS && TIMEOUT_EXCEPTIONS.include?(exception&.class) && batch_size > sub_batch_size && batch_size > 1 end def split_and_retry! 
@@ -121,7 +121,7 @@ module Gitlab new_batch_size = batch_size / 2 - raise SplitAndRetryError, 'Job cannot be split further' if new_batch_size < 1 + break update!(attempts: 0) if new_batch_size < 1 batching_strategy = batched_migration.batch_class.new(connection: self.class.connection) next_batch_bounds = batching_strategy.next_batch( diff --git a/lib/gitlab/database/background_migration/batched_migration.rb b/lib/gitlab/database/background_migration/batched_migration.rb index 9c8db2243f9..6aed1eed994 100644 --- a/lib/gitlab/database/background_migration/batched_migration.rb +++ b/lib/gitlab/database/background_migration/batched_migration.rb @@ -206,7 +206,7 @@ module Gitlab end def health_context - HealthStatus::Context.new([table_name]) + HealthStatus::Context.new(connection, [table_name]) end def hold!(until_time: 10.minutes.from_now) @@ -231,6 +231,12 @@ module Gitlab "BatchedMigration[id: #{id}]" end + def progress + return unless total_tuple_count.to_i > 0 + + 100 * migrated_tuple_count / total_tuple_count + end + private def validate_batched_jobs_status diff --git a/lib/gitlab/database/background_migration/batched_migration_runner.rb b/lib/gitlab/database/background_migration/batched_migration_runner.rb index 1bc2e931391..7224ff2b517 100644 --- a/lib/gitlab/database/background_migration/batched_migration_runner.rb +++ b/lib/gitlab/database/background_migration/batched_migration_runner.rb @@ -144,9 +144,9 @@ module Gitlab end def adjust_migration(active_migration) - signal = HealthStatus.evaluate(active_migration) + signals = HealthStatus.evaluate(active_migration) - if signal.is_a?(HealthStatus::Signals::Stop) + if signals.any?(&:stop?) active_migration.hold! else active_migration.optimize! 
diff --git a/lib/gitlab/database/background_migration/batched_migration_wrapper.rb b/lib/gitlab/database/background_migration/batched_migration_wrapper.rb index 5f4b2be3da8..ad747a8131d 100644 --- a/lib/gitlab/database/background_migration/batched_migration_wrapper.rb +++ b/lib/gitlab/database/background_migration/batched_migration_wrapper.rb @@ -64,9 +64,10 @@ module Gitlab batch_column: tracking_record.migration_column_name, sub_batch_size: tracking_record.sub_batch_size, pause_ms: tracking_record.pause_ms, + job_arguments: tracking_record.migration_job_arguments, connection: connection) - job_instance.perform(*tracking_record.migration_job_arguments) + job_instance.perform job_instance end diff --git a/lib/gitlab/database/background_migration/health_status.rb b/lib/gitlab/database/background_migration/health_status.rb index 01f9c5eb5fd..9a283074b32 100644 --- a/lib/gitlab/database/background_migration/health_status.rb +++ b/lib/gitlab/database/background_migration/health_status.rb @@ -4,21 +4,29 @@ module Gitlab module Database module BackgroundMigration module HealthStatus + DEFAULT_INIDICATORS = [ + Indicators::AutovacuumActiveOnTable, + Indicators::WriteAheadLog + ].freeze + # Rather than passing along the migration, we use a more explicitly defined context - Context = Struct.new(:tables) + Context = Struct.new(:connection, :tables) - def self.evaluate(migration, indicator = Indicators::AutovacuumActiveOnTable) - signal = begin - indicator.new(migration.health_context).evaluate - rescue StandardError => e - Gitlab::ErrorTracking.track_exception(e, migration_id: migration.id, - job_class_name: migration.job_class_name) - Signals::Unknown.new(indicator, reason: "unexpected error: #{e.message} (#{e.class})") - end + def self.evaluate(migration, indicators = DEFAULT_INIDICATORS) + indicators.map do |indicator| + signal = begin + indicator.new(migration.health_context).evaluate + rescue StandardError => e + Gitlab::ErrorTracking.track_exception(e, migration_id: 
migration.id, + job_class_name: migration.job_class_name) - log_signal(signal, migration) if signal.log_info? + Signals::Unknown.new(indicator, reason: "unexpected error: #{e.message} (#{e.class})") + end - signal + log_signal(signal, migration) if signal.log_info? + + signal + end end def self.log_signal(signal, migration) diff --git a/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log.rb b/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log.rb new file mode 100644 index 00000000000..d2fb0a8b751 --- /dev/null +++ b/lib/gitlab/database/background_migration/health_status/indicators/write_ahead_log.rb @@ -0,0 +1,74 @@ +# frozen_string_literal: true + +module Gitlab + module Database + module BackgroundMigration + module HealthStatus + module Indicators + class WriteAheadLog + include Gitlab::Utils::StrongMemoize + + LIMIT = 42 + PENDING_WAL_COUNT_SQL = <<~SQL + WITH + current_wal_file AS ( + SELECT pg_walfile_name(pg_current_wal_insert_lsn()) AS pg_walfile_name + ), + current_wal AS ( + SELECT + ('x' || substring(pg_walfile_name, 9, 8))::bit(32)::int AS log, + ('x' || substring(pg_walfile_name, 17, 8))::bit(32)::int AS seg, + pg_walfile_name + FROM current_wal_file + ), + archive_wal AS ( + SELECT + ('x' || substring(last_archived_wal, 9, 8))::bit(32)::int AS log, + ('x' || substring(last_archived_wal, 17, 8))::bit(32)::int AS seg, + last_archived_wal + FROM pg_stat_archiver + ) + SELECT ((current_wal.log - archive_wal.log) * 256) + (current_wal.seg - archive_wal.seg) AS pending_wal_count + FROM current_wal, archive_wal + SQL + + def initialize(context) + @connection = context.connection + end + + def evaluate + return Signals::NotAvailable.new(self.class, reason: 'indicator disabled') unless enabled? 
+ + unless pending_wal_count + return Signals::NotAvailable.new(self.class, reason: 'WAL archive queue can not be calculated') + end + + if pending_wal_count > LIMIT + Signals::Stop.new(self.class, reason: "WAL archive queue is too big") + else + Signals::Normal.new(self.class, reason: 'WAL archive queue is within limit') + end + end + + private + + attr_reader :connection + + def enabled? + Feature.enabled?(:batched_migrations_health_status_wal, type: :ops) + end + + # Returns number of WAL segments pending archival + def pending_wal_count + strong_memoize(:pending_wal_count) do + Gitlab::Database::LoadBalancing::Session.current.use_primary do + connection.execute(PENDING_WAL_COUNT_SQL).to_a.first&.fetch('pending_wal_count') + end + end + end + end + end + end + end + end +end diff --git a/lib/gitlab/database/background_migration/health_status/signals.rb b/lib/gitlab/database/background_migration/health_status/signals.rb index 6cd0ebd1bd0..be741a9d91b 100644 --- a/lib/gitlab/database/background_migration/health_status/signals.rb +++ b/lib/gitlab/database/background_migration/health_status/signals.rb @@ -22,6 +22,10 @@ module Gitlab def log_info? false end + + def stop? + false + end # :nocov: private @@ -38,6 +42,10 @@ module Gitlab def log_info? true end + + def stop? + true + end # :nocov: end diff --git a/lib/gitlab/database/bulk_update.rb b/lib/gitlab/database/bulk_update.rb index b1f9da30585..d68be19047e 100644 --- a/lib/gitlab/database/bulk_update.rb +++ b/lib/gitlab/database/bulk_update.rb @@ -157,7 +157,7 @@ module Gitlab def self.execute(columns, mapping, &to_class) raise ArgumentError if mapping.blank? - entries_by_class = mapping.group_by { |k, v| block_given? ? to_class.call(k) : k.class } + entries_by_class = mapping.group_by { |k, v| to_class ? to_class.call(k) : k.class } entries_by_class.each do |model, entries| Setter.new(model, columns, entries).update! 
diff --git a/lib/gitlab/database/dynamic_model_helpers.rb b/lib/gitlab/database/dynamic_model_helpers.rb index ad7dea8f0d9..2deb89a0b84 100644 --- a/lib/gitlab/database/dynamic_model_helpers.rb +++ b/lib/gitlab/database/dynamic_model_helpers.rb @@ -32,7 +32,7 @@ module Gitlab def each_batch_range(table_name, connection:, scope: ->(table) { table.all }, of: BATCH_SIZE) each_batch(table_name, connection: connection, scope: scope, of: of) do |batch| - yield batch.pluck('MIN(id), MAX(id)').first + yield batch.pick('MIN(id), MAX(id)') end end end diff --git a/lib/gitlab/database/gitlab_schemas.yml b/lib/gitlab/database/gitlab_schemas.yml index 4a467d18f0a..d05eee7d6e6 100644 --- a/lib/gitlab/database/gitlab_schemas.yml +++ b/lib/gitlab/database/gitlab_schemas.yml @@ -300,6 +300,7 @@ licenses: :gitlab_main lists: :gitlab_main list_user_preferences: :gitlab_main loose_foreign_keys_deleted_records: :gitlab_shared +member_roles: :gitlab_main member_tasks: :gitlab_main members: :gitlab_main merge_request_assignees: :gitlab_main @@ -323,6 +324,10 @@ metrics_dashboard_annotations: :gitlab_main metrics_users_starred_dashboards: :gitlab_main milestone_releases: :gitlab_main milestones: :gitlab_main +ml_candidates: :gitlab_main +ml_experiments: :gitlab_main +ml_candidate_metrics: :gitlab_main +ml_candidate_params: :gitlab_main namespace_admin_notes: :gitlab_main namespace_aggregation_schedules: :gitlab_main namespace_bans: :gitlab_main @@ -331,6 +336,7 @@ namespace_package_settings: :gitlab_main namespace_root_storage_statistics: :gitlab_main namespace_ci_cd_settings: :gitlab_main namespace_settings: :gitlab_main +namespace_details: :gitlab_main namespaces: :gitlab_main namespaces_sync_events: :gitlab_main namespace_statistics: :gitlab_main @@ -529,6 +535,7 @@ user_custom_attributes: :gitlab_main user_details: :gitlab_main user_follow_users: :gitlab_main user_group_callouts: :gitlab_main +user_project_callouts: :gitlab_main user_highest_roles: :gitlab_main 
user_interacted_projects: :gitlab_main user_permission_export_uploads: :gitlab_main diff --git a/lib/gitlab/database/lock_writes_manager.rb b/lib/gitlab/database/lock_writes_manager.rb new file mode 100644 index 00000000000..cd483d616bb --- /dev/null +++ b/lib/gitlab/database/lock_writes_manager.rb @@ -0,0 +1,80 @@ +# frozen_string_literal: true + +module Gitlab + module Database + class LockWritesManager + TRIGGER_FUNCTION_NAME = 'gitlab_schema_prevent_write' + + def initialize(table_name:, connection:, database_name:, logger: nil) + @table_name = table_name + @connection = connection + @database_name = database_name + @logger = logger + end + + def lock_writes + logger&.info "Database: '#{database_name}', Table: '#{table_name}': Lock Writes".color(:yellow) + sql = <<-SQL + DROP TRIGGER IF EXISTS #{write_trigger_name(table_name)} ON #{table_name}; + CREATE TRIGGER #{write_trigger_name(table_name)} + BEFORE INSERT OR UPDATE OR DELETE OR TRUNCATE + ON #{table_name} + FOR EACH STATEMENT EXECUTE FUNCTION #{TRIGGER_FUNCTION_NAME}(); + SQL + + with_retries(connection) do + connection.execute(sql) + end + end + + def unlock_writes + logger&.info "Database: '#{database_name}', Table: '#{table_name}': Allow Writes".color(:green) + sql = <<-SQL + DROP TRIGGER IF EXISTS #{write_trigger_name(table_name)} ON #{table_name} + SQL + + with_retries(connection) do + connection.execute(sql) + end + end + + private + + attr_reader :table_name, :connection, :database_name, :logger + + def with_retries(connection, &block) + with_statement_timeout_retries do + with_lock_retries(connection) do + yield + end + end + end + + def with_statement_timeout_retries(times = 5) + current_iteration = 1 + begin + yield + rescue ActiveRecord::QueryCanceled => err # rubocop:disable Database/RescueQueryCanceled + if current_iteration <= times + current_iteration += 1 + retry + else + raise err + end + end + end + + def with_lock_retries(connection, &block) + Gitlab::Database::WithLockRetries.new( + 
klass: "gitlab:db:lock_writes", + logger: logger || Gitlab::AppLogger, + connection: connection + ).run(&block) + end + + def write_trigger_name(table_name) + "gitlab_schema_write_trigger_for_#{table_name}" + end + end + end +end diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb index 4bb1d71ce18..db39524f4f6 100644 --- a/lib/gitlab/database/migration_helpers.rb +++ b/lib/gitlab/database/migration_helpers.rb @@ -505,14 +505,14 @@ module Gitlab # case another unique integer column can be used. Example: :user_id # # rubocop: disable Metrics/AbcSize - def update_column_in_batches(table, column, value, batch_size: nil, batch_column_name: :id) + def update_column_in_batches(table_name, column, value, batch_size: nil, batch_column_name: :id, disable_lock_writes: false) if transaction_open? raise 'update_column_in_batches can not be run inside a transaction, ' \ 'you can disable transactions by calling disable_ddl_transaction! ' \ 'in the body of your migration class' end - table = Arel::Table.new(table) + table = Arel::Table.new(table_name) count_arel = table.project(Arel.star.count.as('count')) count_arel = yield table, count_arel if block_given? @@ -559,7 +559,10 @@ module Gitlab update_arel = yield table, update_arel if block_given? - execute(update_arel.to_sql) + transaction do + execute("SELECT set_config('lock_writes.#{table_name}', 'false', true)") if disable_lock_writes + execute(update_arel.to_sql) + end # There are no more rows left to update. break unless stop_row @@ -1666,7 +1669,9 @@ into similar problems in the future (e.g. when new tables are created). 
end Gitlab::Database::QueryAnalyzers::RestrictAllowedSchemas.with_suppressed do - update_column_in_batches(table, new, old_value, batch_column_name: batch_column_name) + Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection.with_suppressed do + update_column_in_batches(table, new, old_value, batch_column_name: batch_column_name, disable_lock_writes: true) + end end add_not_null_constraint(table, new) unless old_col.null diff --git a/lib/gitlab/database/migrations/background_migration_helpers.rb b/lib/gitlab/database/migrations/background_migration_helpers.rb index 9bffed43077..25e75a10bb3 100644 --- a/lib/gitlab/database/migrations/background_migration_helpers.rb +++ b/lib/gitlab/database/migrations/background_migration_helpers.rb @@ -76,7 +76,7 @@ module Gitlab max = relation.arel_table[primary_column_name].maximum min = relation.arel_table[primary_column_name].minimum - start_id, end_id = relation.pluck(min, max).first + start_id, end_id = relation.pick(min, max) # `SingleDatabaseWorker.bulk_perform_in` schedules all jobs for # the same time, which is not helpful in most cases where we wish to diff --git a/lib/gitlab/database/migrations/base_background_runner.rb b/lib/gitlab/database/migrations/base_background_runner.rb index 2772502140e..a9440cafd30 100644 --- a/lib/gitlab/database/migrations/base_background_runner.rb +++ b/lib/gitlab/database/migrations/base_background_runner.rb @@ -38,7 +38,7 @@ module Gitlab per_background_migration_result_dir = File.join(@result_dir, migration_name) instrumentation = Instrumentation.new(result_dir: per_background_migration_result_dir) - batch_names = (1..).each.lazy.map { |i| "batch_#{i}"} + batch_names = (1..).each.lazy.map { |i| "batch_#{i}" } jobs.shuffle.each do |j| break if run_until <= Time.current diff --git a/lib/gitlab/database/migrations/batched_background_migration_helpers.rb b/lib/gitlab/database/migrations/batched_background_migration_helpers.rb index 936b986ea07..363fd0598f9 100644 --- 
a/lib/gitlab/database/migrations/batched_background_migration_helpers.rb +++ b/lib/gitlab/database/migrations/batched_background_migration_helpers.rb @@ -24,7 +24,7 @@ module Gitlab # class must be present in the Gitlab::BackgroundMigration module, and the batch class (if specified) must be # present in the Gitlab::BackgroundMigration::BatchingStrategies module. # - # If migration with same job_class_name, table_name, column_name, and job_aruments already exists, this helper + # If migration with same job_class_name, table_name, column_name, and job_arguments already exists, this helper # will log an warning and not create a new one. # # job_class_name - The background migration job class as a string @@ -107,6 +107,11 @@ module Gitlab status_event: status_event ) + if migration.job_class.respond_to?(:job_arguments_count) && migration.job_class.job_arguments_count != job_arguments.count + raise "Wrong number of job arguments for #{migration.job_class_name} " \ + "(given #{job_arguments.count}, expected #{migration.job_class.job_arguments_count})" + end + # Below `BatchedMigration` attributes were introduced after the # initial `batched_background_migrations` table was created, so any # migrations that ran relying on initial table schema would not know diff --git a/lib/gitlab/database/partitioning/sliding_list_strategy.rb b/lib/gitlab/database/partitioning/sliding_list_strategy.rb index 5cf32d3272c..4b5349f0327 100644 --- a/lib/gitlab/database/partitioning/sliding_list_strategy.rb +++ b/lib/gitlab/database/partitioning/sliding_list_strategy.rb @@ -77,7 +77,6 @@ module Gitlab end def validate_and_fix - return unless Feature.enabled?(:fix_sliding_list_partitioning) return if no_partitions_exist? 
old_default_value = current_default_value diff --git a/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb b/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb index 9cab2c51b3f..dcf457b9d63 100644 --- a/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb +++ b/lib/gitlab/database/partitioning_migration_helpers/backfill_partitioned_table.rb @@ -25,7 +25,7 @@ module Gitlab parent_batch_relation = relation_scoped_to_range(source_table, source_column, start_id, stop_id) parent_batch_relation.each_batch(of: SUB_BATCH_SIZE) do |sub_batch| - sub_start_id, sub_stop_id = sub_batch.pluck(Arel.sql("MIN(#{source_column}), MAX(#{source_column})")).first + sub_start_id, sub_stop_id = sub_batch.pick(Arel.sql("MIN(#{source_column}), MAX(#{source_column})")) bulk_copy.copy_between(sub_start_id, sub_stop_id) sleep(PAUSE_SECONDS) diff --git a/lib/gitlab/database/postgres_hll/buckets.rb b/lib/gitlab/database/postgres_hll/buckets.rb index 76818bbf340..cbc9544d905 100644 --- a/lib/gitlab/database/postgres_hll/buckets.rb +++ b/lib/gitlab/database/postgres_hll/buckets.rb @@ -38,7 +38,7 @@ module Gitlab # # @param other_buckets_hash hash with HyperLogLog structure representation def merge_hash!(other_buckets_hash) - buckets.merge!(other_buckets_hash) {|_key, old, new| new > old ? new : old } + buckets.merge!(other_buckets_hash) { |_key, old, new| new > old ? 
new : old } end # Serialize instance underlying HyperLogLog structure to JSON format, that can be stored in various persistence layers @@ -61,7 +61,7 @@ module Gitlab num_uniques = ( ((TOTAL_BUCKETS**2) * (0.7213 / (1 + 1.079 / TOTAL_BUCKETS))) / - (num_zero_buckets + buckets.values.sum { |bucket_hash| 2**(-1 * bucket_hash)} ) + (num_zero_buckets + buckets.values.sum { |bucket_hash| 2**(-1 * bucket_hash) } ) ).to_i if num_zero_buckets > 0 && num_uniques < 2.5 * TOTAL_BUCKETS diff --git a/lib/gitlab/database/reindexing.rb b/lib/gitlab/database/reindexing.rb index e13dd3b2058..b96dffc99ac 100644 --- a/lib/gitlab/database/reindexing.rb +++ b/lib/gitlab/database/reindexing.rb @@ -27,6 +27,7 @@ module Gitlab # Hack: Before we do actual reindexing work, create async indexes Gitlab::Database::AsyncIndexes.create_pending_indexes! if Feature.enabled?(:database_async_index_creation, type: :ops) + Gitlab::Database::AsyncIndexes.drop_pending_indexes! if Feature.enabled?(:database_async_index_destruction, type: :ops) automatic_reindexing end diff --git a/lib/gitlab/database/reindexing/reindex_concurrently.rb b/lib/gitlab/database/reindexing/reindex_concurrently.rb index 152935bd734..60fa4deda39 100644 --- a/lib/gitlab/database/reindexing/reindex_concurrently.rb +++ b/lib/gitlab/database/reindexing/reindex_concurrently.rb @@ -20,7 +20,7 @@ module Gitlab def perform raise ReindexError, 'indexes serving an exclusion constraint are currently not supported' if index.exclusion? 
- raise ReindexError, 'index is a left-over temporary index from a previous reindexing run' if index.name =~ /#{TEMPORARY_INDEX_PATTERN}/ + raise ReindexError, 'index is a left-over temporary index from a previous reindexing run' if index.name =~ /#{TEMPORARY_INDEX_PATTERN}/o # Expression indexes require additional statistics in `pg_statistic`: # select * from pg_statistic where starelid = (select oid from pg_class where relname = 'some_index'); @@ -81,10 +81,10 @@ module Gitlab # Example lingering index name: some_index_ccnew1 # Example prefix: 'some_index' - prefix = lingering_index.name.gsub(/#{TEMPORARY_INDEX_PATTERN}/, '') + prefix = lingering_index.name.gsub(/#{TEMPORARY_INDEX_PATTERN}/o, '') # Example suffix: '_ccnew1' - suffix = lingering_index.name.match(/#{TEMPORARY_INDEX_PATTERN}/)[0] + suffix = lingering_index.name.match(/#{TEMPORARY_INDEX_PATTERN}/o)[0] # Only remove if the lingering index name could have been chosen # as a result of a REINDEX operation (considering that PostgreSQL diff --git a/lib/gitlab/database/with_lock_retries.rb b/lib/gitlab/database/with_lock_retries.rb index f2c5bb9088f..3206c5626c3 100644 --- a/lib/gitlab/database/with_lock_retries.rb +++ b/lib/gitlab/database/with_lock_retries.rb @@ -83,7 +83,7 @@ module Gitlab # @param [Boolean] raise_on_exhaustion whether to raise `AttemptsExhaustedError` when exhausting max attempts # @param [Proc] block of code that will be executed def run(raise_on_exhaustion: false, &block) - raise 'no block given' unless block_given? 
+ raise 'no block given' unless block @block = block diff --git a/lib/gitlab/dependency_linker/base_linker.rb b/lib/gitlab/dependency_linker/base_linker.rb index 76855f2950d..6d114de8ae8 100644 --- a/lib/gitlab/dependency_linker/base_linker.rb +++ b/lib/gitlab/dependency_linker/base_linker.rb @@ -34,9 +34,9 @@ module Gitlab return if external_ref =~ GIT_INVALID_URL_REGEX case external_ref - when /\A#{URL_REGEX}\z/ + when /\A#{URL_REGEX}\z/o external_ref - when /\A#{REPO_REGEX}\z/ + when /\A#{REPO_REGEX}\z/o github_url(external_ref) else package_url(name) diff --git a/lib/gitlab/dependency_linker/cargo_toml_linker.rb b/lib/gitlab/dependency_linker/cargo_toml_linker.rb index 57e0a5f4699..cba4319ce83 100644 --- a/lib/gitlab/dependency_linker/cargo_toml_linker.rb +++ b/lib/gitlab/dependency_linker/cargo_toml_linker.rb @@ -39,7 +39,11 @@ module Gitlab end def toml - @toml ||= TomlRB.parse(plain_text) rescue nil + @toml ||= begin + TomlRB.parse(plain_text) + rescue StandardError + nil + end end end end diff --git a/lib/gitlab/dependency_linker/composer_json_linker.rb b/lib/gitlab/dependency_linker/composer_json_linker.rb index 4b8862b31ee..965ed8bb95e 100644 --- a/lib/gitlab/dependency_linker/composer_json_linker.rb +++ b/lib/gitlab/dependency_linker/composer_json_linker.rb @@ -13,7 +13,7 @@ module Gitlab end def package_url(name) - "https://packagist.org/packages/#{name}" if name =~ /\A#{REPO_REGEX}\z/ + "https://packagist.org/packages/#{name}" if name =~ /\A#{REPO_REGEX}\z/o end end end diff --git a/lib/gitlab/dependency_linker/godeps_json_linker.rb b/lib/gitlab/dependency_linker/godeps_json_linker.rb index 9166e9091ac..049a807b760 100644 --- a/lib/gitlab/dependency_linker/godeps_json_linker.rb +++ b/lib/gitlab/dependency_linker/godeps_json_linker.rb @@ -12,10 +12,10 @@ module Gitlab def link_dependencies link_json('ImportPath') do |path| case path - when %r{\A(?<repo>github\.com/#{REPO_REGEX})/(?<path>.+)\z} + when 
%r{\A(?<repo>github\.com/#{REPO_REGEX})/(?<path>.+)\z}o "https://#{$~[:repo]}/tree/master/#{$~[:path]}" - when %r{\A(?<repo>gitlab\.com/#{NESTED_REPO_REGEX})\.git/(?<path>.+)\z}, - %r{\A(?<repo>gitlab\.com/#{REPO_REGEX})/(?<path>.+)\z} + when %r{\A(?<repo>gitlab\.com/#{NESTED_REPO_REGEX})\.git/(?<path>.+)\z}o, + %r{\A(?<repo>gitlab\.com/#{REPO_REGEX})/(?<path>.+)\z}o "https://#{$~[:repo]}/-/tree/master/#{$~[:path]}" when /\Agolang\.org/ diff --git a/lib/gitlab/dependency_linker/json_linker.rb b/lib/gitlab/dependency_linker/json_linker.rb index 86dc7efb0d9..15c17132283 100644 --- a/lib/gitlab/dependency_linker/json_linker.rb +++ b/lib/gitlab/dependency_linker/json_linker.rb @@ -39,7 +39,11 @@ module Gitlab end def json - @json ||= Gitlab::Json.parse(plain_text) rescue nil + @json ||= begin + Gitlab::Json.parse(plain_text) + rescue StandardError + nil + end end end end diff --git a/lib/gitlab/dependency_linker/podspec_linker.rb b/lib/gitlab/dependency_linker/podspec_linker.rb index 14abd3999c4..f6da17efff4 100644 --- a/lib/gitlab/dependency_linker/podspec_linker.rb +++ b/lib/gitlab/dependency_linker/podspec_linker.rb @@ -14,10 +14,10 @@ module Gitlab def link_dependencies link_method_call('homepage', URL_REGEX, &:itself) - link_regex(/(git:|:git\s*=>)\s*['"](?<name>#{URL_REGEX})['"]/, &:itself) + link_regex(/(git:|:git\s*=>)\s*['"](?<name>#{URL_REGEX})['"]/o, &:itself) link_method_call('license', &method(:license_url)) - link_regex(/license\s*=\s*\{\s*(type:|:type\s*=>)\s*#{STRING_REGEX}/, &method(:license_url)) + link_regex(/license\s*=\s*\{\s*(type:|:type\s*=>)\s*#{STRING_REGEX}/o, &method(:license_url)) link_method_call('dependency', &method(:package_url)) end diff --git a/lib/gitlab/diff/file.rb b/lib/gitlab/diff/file.rb index 8c55652da43..5583c896803 100644 --- a/lib/gitlab/diff/file.rb +++ b/lib/gitlab/diff/file.rb @@ -41,8 +41,7 @@ module Gitlab @unfolded = false # Ensure items are collected in the the batch - new_blob_lazy - old_blob_lazy + 
add_blobs_to_batch_loader end def use_semantic_ipynb_diff? @@ -382,6 +381,11 @@ module Gitlab file_path.ends_with?('.ipynb') end + def add_blobs_to_batch_loader + new_blob_lazy + old_blob_lazy + end + private def diffable_by_attribute? diff --git a/lib/gitlab/diff/highlight_cache.rb b/lib/gitlab/diff/highlight_cache.rb index 8e9dc3a305f..7cfe0086f57 100644 --- a/lib/gitlab/diff/highlight_cache.rb +++ b/lib/gitlab/diff/highlight_cache.rb @@ -6,7 +6,7 @@ module Gitlab include Gitlab::Utils::Gzip include Gitlab::Utils::StrongMemoize - EXPIRATION = 1.week + EXPIRATION = 1.day VERSION = 2 delegate :diffable, to: :@diff_collection @@ -69,14 +69,14 @@ module Gitlab def key strong_memoize(:redis_key) do - [ - 'highlighted-diff-files', - diffable.cache_key, - VERSION, + options = [ diff_options, Feature.enabled?(:use_marker_ranges, diffable.project), Feature.enabled?(:diff_line_syntax_highlighting, diffable.project) - ].join(":") + ] + options_for_key = OpenSSL::Digest::SHA256.hexdigest(options.join) + + ['highlighted-diff-files', diffable.cache_key, VERSION, options_for_key].join(":") end end diff --git a/lib/gitlab/diff/parser.rb b/lib/gitlab/diff/parser.rb index adb711ca89f..924c28e3db5 100644 --- a/lib/gitlab/diff/parser.rb +++ b/lib/gitlab/diff/parser.rb @@ -28,9 +28,11 @@ module Gitlab if line =~ /^@@ -/ type = "match" - line_old = line.match(/\-[0-9]*/)[0].to_i.abs rescue 0 - line_new = line.match(/\+[0-9]*/)[0].to_i.abs rescue 0 + diff_hunk = Gitlab::WordDiff::Segments::DiffHunk.new(line) + line_old = diff_hunk.pos_old + line_new = diff_hunk.pos_new + # not using diff_hunk.first_line? 
because of defaults next if line_old <= 1 && line_new <= 1 # top of file yielder << Gitlab::Diff::Line.new(full_line, type, line_obj_index, line_old, line_new, parent_file: diff_file) diff --git a/lib/gitlab/diff/rendered/notebook/diff_file.rb b/lib/gitlab/diff/rendered/notebook/diff_file.rb index 3e1652bd318..31cbdc156cf 100644 --- a/lib/gitlab/diff/rendered/notebook/diff_file.rb +++ b/lib/gitlab/diff/rendered/notebook/diff_file.rb @@ -56,7 +56,7 @@ module Gitlab ) lines.zip(line_positions_at_source_diff(lines, transformed_blocks)) - .map { |line, positions| mutate_line(line, positions, lines_in_source)} + .map { |line, positions| mutate_line(line, positions, lines_in_source) } end end diff --git a/lib/gitlab/diff/rendered/notebook/diff_file_helper.rb b/lib/gitlab/diff/rendered/notebook/diff_file_helper.rb index f381792953e..ad709a79f30 100644 --- a/lib/gitlab/diff/rendered/notebook/diff_file_helper.rb +++ b/lib/gitlab/diff/rendered/notebook/diff_file_helper.rb @@ -77,8 +77,8 @@ module Gitlab def lines_in_source_diff(source_diff_lines, is_deleted_file, is_added_file) { - from: is_added_file ? Set[] : source_diff_lines.map {|l| l.old_pos}.to_set, - to: is_deleted_file ? Set[] : source_diff_lines.map {|l| l.new_pos}.to_set + from: is_added_file ? Set[] : source_diff_lines.map { |l| l.old_pos }.to_set, + to: is_deleted_file ? Set[] : source_diff_lines.map { |l| l.new_pos }.to_set } end diff --git a/lib/gitlab/doorkeeper_secret_storing/pbkdf2_sha512.rb b/lib/gitlab/doorkeeper_secret_storing/pbkdf2_sha512.rb new file mode 100644 index 00000000000..4bfb5f9e64c --- /dev/null +++ b/lib/gitlab/doorkeeper_secret_storing/pbkdf2_sha512.rb @@ -0,0 +1,28 @@ +# frozen_string_literal: true + +module Gitlab + module DoorkeeperSecretStoring + class Pbkdf2Sha512 < ::Doorkeeper::SecretStoring::Base + STRETCHES = 20_000 + # An empty salt is used because we need to look tokens up solely by + # their hashed value. 
Additionally, tokens are always cryptographically + # pseudo-random and unique, therefore salting provides no + # additional security. + SALT = '' + + def self.transform_secret(plain_secret) + return plain_secret unless Feature.enabled?(:hash_oauth_tokens) + + Devise::Pbkdf2Encryptable::Encryptors::Pbkdf2Sha512.digest(plain_secret, STRETCHES, SALT) + end + + ## + # Determines whether this strategy supports restoring + # secrets from the database. This allows detecting users + # trying to use a non-restorable strategy with +reuse_access_tokens+. + def self.allows_restoring_secrets? + false + end + end + end +end diff --git a/lib/gitlab/email/message/in_product_marketing/helper.rb b/lib/gitlab/email/message/in_product_marketing/helper.rb index 0a0e55c2999..73d1e0743cc 100644 --- a/lib/gitlab/email/message/in_product_marketing/helper.rb +++ b/lib/gitlab/email/message/in_product_marketing/helper.rb @@ -67,7 +67,7 @@ module Gitlab def list(array) case format when :html - tag.ul { array.map { |item| tag.li item} } + tag.ul { array.map { |item| tag.li item } } else '- ' + array.join("\n- ") end diff --git a/lib/gitlab/error_tracking/error_repository/open_api_strategy.rb b/lib/gitlab/error_tracking/error_repository/open_api_strategy.rb index e3eae20c520..cc822e4c10b 100644 --- a/lib/gitlab/error_tracking/error_repository/open_api_strategy.rb +++ b/lib/gitlab/error_tracking/error_repository/open_api_strategy.rb @@ -13,6 +13,7 @@ module Gitlab config.scheme = api_url.scheme config.host = [api_url.host, api_url.port].compact.join(':') config.server_index = nil + config.api_key['internalToken'] = api_key config.logger = Gitlab::AppLogger end end @@ -25,7 +26,7 @@ module Gitlab end def find_error(id) - api = open_api::ErrorsApi.new + api = build_api_client error = api.get_error(project_id, id) to_sentry_detailed_error(error) @@ -43,7 +44,7 @@ module Gitlab limit: limit }.compact - api = open_api::ErrorsApi.new + api = build_api_client errors, _status, headers = 
api.list_errors_with_http_info(project_id, opts) pagination = pagination_from_headers(headers) @@ -64,7 +65,7 @@ module Gitlab event = newest_event_for(id) return unless event - api = open_api::ErrorsApi.new + api = build_api_client error = api.get_error(project_id, id) return unless error @@ -79,7 +80,7 @@ module Gitlab body = open_api::ErrorUpdatePayload.new(opts) - api = open_api::ErrorsApi.new + api = build_api_client api.update_error(project_id, id, body) true @@ -100,7 +101,7 @@ module Gitlab config.base_path ].join('') - "#{base_url}/projects/api/#{project_id}" + "#{base_url}/projects/#{project_id}" end private @@ -108,7 +109,7 @@ module Gitlab def event_for(id, sort:) opts = { sort: sort, limit: 1 } - api = open_api::ErrorsApi.new + api = build_api_client api.list_events(project_id, id, opts).first rescue ErrorTrackingOpenAPI::ApiError => e log_exception(e) @@ -234,6 +235,14 @@ module Gitlab URI(url) end + def api_key + Gitlab::CurrentSettings.current_application_settings.error_tracking_access_token + end + + def build_api_client + open_api::ErrorsApi.new + end + def log_exception(exception) params = { http_code: exception.code, diff --git a/lib/gitlab/event_store.rb b/lib/gitlab/event_store.rb index 4955e873688..b45970cb45a 100644 --- a/lib/gitlab/event_store.rb +++ b/lib/gitlab/event_store.rb @@ -40,6 +40,17 @@ module Gitlab store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Pages::PageDeletedEvent store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectDeletedEvent store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectCreatedEvent + store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectPathChangedEvent + store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectArchivedEvent + store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Projects::ProjectTransferedEvent + store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupTransferedEvent + 
store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupPathChangedEvent + store.subscribe ::Pages::InvalidateDomainCacheWorker, to: ::Groups::GroupDeletedEvent + + store.subscribe ::MergeRequests::CreateApprovalEventWorker, to: ::MergeRequests::ApprovedEvent + store.subscribe ::MergeRequests::CreateApprovalNoteWorker, to: ::MergeRequests::ApprovedEvent + store.subscribe ::MergeRequests::ResolveTodosAfterApprovalWorker, to: ::MergeRequests::ApprovedEvent + store.subscribe ::MergeRequests::ExecuteApprovalHooksWorker, to: ::MergeRequests::ApprovedEvent end private_class_method :configure! end diff --git a/lib/gitlab/exclusive_lease_helpers/sleeping_lock.rb b/lib/gitlab/exclusive_lease_helpers/sleeping_lock.rb index 52035220a71..7ef3e738481 100644 --- a/lib/gitlab/exclusive_lease_helpers/sleeping_lock.rb +++ b/lib/gitlab/exclusive_lease_helpers/sleeping_lock.rb @@ -5,6 +5,8 @@ module Gitlab # Wrapper around ExclusiveLease that adds retry logic class SleepingLock delegate :cancel, to: :@lease + MAX_ATTEMPTS = 65 + DEFAULT_ATTEMPTS = 10 def initialize(key, timeout:, delay:) @lease = ::Gitlab::ExclusiveLease.new(key, timeout: timeout) @@ -12,9 +14,9 @@ module Gitlab @attempts = 0 end - def obtain(max_attempts) + def obtain(max_attempts = DEFAULT_ATTEMPTS) until held? - raise FailedToObtainLockError, 'Failed to obtain a lock' if attempts >= max_attempts + raise FailedToObtainLockError, 'Failed to obtain a lock' if attempts >= [max_attempts, MAX_ATTEMPTS].min sleep(sleep_sec) unless first_attempt? 
try_obtain diff --git a/lib/gitlab/form_builders/gitlab_ui_form_builder.rb b/lib/gitlab/form_builders/gitlab_ui_form_builder.rb index 9174ca165cd..ea98f6b2eec 100644 --- a/lib/gitlab/form_builders/gitlab_ui_form_builder.rb +++ b/lib/gitlab/form_builders/gitlab_ui_form_builder.rb @@ -3,6 +3,20 @@ module Gitlab module FormBuilders class GitlabUiFormBuilder < ActionView::Helpers::FormBuilder + def submit(value = nil, options = {}) + if options[:pajamas_button] + @template.render Pajamas::ButtonComponent.new( + variant: :confirm, + type: :submit, + button_options: options.except(:pajamas_button) + ) do + value + end + else + super + end + end + def gitlab_ui_checkbox_component( method, label = nil, @@ -45,6 +59,10 @@ module Gitlab ).render_in(@template, &block) end + def gitlab_ui_datepicker(method, options = {}) + @template.text_field @object_name, method, options.merge(class: "datepicker form-control gl-form-input") + end + private def format_options(options) diff --git a/lib/gitlab/git.rb b/lib/gitlab/git.rb index 882bd57eb1d..4b9f2ababc8 100644 --- a/lib/gitlab/git.rb +++ b/lib/gitlab/git.rb @@ -47,11 +47,11 @@ module Gitlab end def tag_ref?(ref) - ref =~ /^#{TAG_REF_PREFIX}.+/ + ref =~ /^#{TAG_REF_PREFIX}.+/o end def branch_ref?(ref) - ref =~ /^#{BRANCH_REF_PREFIX}.+/ + ref =~ /^#{BRANCH_REF_PREFIX}.+/o end def blank_ref?(ref) diff --git a/lib/gitlab/git/remote_repository.rb b/lib/gitlab/git/remote_repository.rb deleted file mode 100644 index 0ea009930b0..00000000000 --- a/lib/gitlab/git/remote_repository.rb +++ /dev/null @@ -1,72 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Git - # - # When a Gitaly call involves two repositories instead of one we cannot - # assume that both repositories are on the same Gitaly server. In this - # case we need to make a distinction between the repository that the - # call is being made on (a Repository instance), and the "other" - # repository (a RemoteRepository instance). 
This is the reason why we - # have the RemoteRepository class in Gitlab::Git. - # - # When you make changes, be aware that gitaly-ruby sub-classes this - # class. - # - class RemoteRepository - attr_reader :relative_path, :gitaly_repository - - def initialize(repository) - @relative_path = repository.relative_path - @gitaly_repository = repository.gitaly_repository - - # These instance variables will not be available in gitaly-ruby, where - # we have no disk access to this repository. - @repository = repository - end - - def empty? - # We will override this implementation in gitaly-ruby because we cannot - # use '@repository' there. - # - # Caches and memoization used on the Rails side - !@repository.exists? || @repository.empty? - end - - def commit_id(revision) - # We will override this implementation in gitaly-ruby because we cannot - # use '@repository' there. - @repository.commit(revision)&.sha - end - - def branch_exists?(name) - # We will override this implementation in gitaly-ruby because we cannot - # use '@repository' there. - @repository.branch_exists?(name) - end - - # Compares self to a Gitlab::Git::Repository. This implementation uses - # 'self.gitaly_repository' so that it will also work in the - # GitalyRemoteRepository subclass defined in gitaly-ruby. - def same_repository?(other_repository) - gitaly_repository.storage_name == other_repository.storage && - gitaly_repository.relative_path == other_repository.relative_path - end - - def path - @repository.path - end - - private - - # Must return an object that responds to 'address' and 'storage'. 
- def gitaly_client - Gitlab::GitalyClient - end - - def storage - gitaly_repository.storage_name - end - end - end -end diff --git a/lib/gitlab/git/repository.rb b/lib/gitlab/git/repository.rb index d7f892ae9d9..ad655fedb6d 100644 --- a/lib/gitlab/git/repository.rb +++ b/lib/gitlab/git/repository.rb @@ -135,6 +135,13 @@ module Gitlab gitaly_ref_client.find_tag(name) end rescue CommandError + # Gitaly used to return an `Internal` error in case the tag wasn't found, which is being translated to + # `CommandError` by the wrapper. This has been converted in v15.3.0 to instead return a structured + # error with a `tag_not_found` error, so rescuing from `Internal` errors can be removed in v15.4.0 and + # later. + rescue Gitlab::Git::UnknownRef + # This is the new error returned by `find_tag`, which knows to translate the structured error returned + # by Gitaly when the tag does not exist. end def local_branches(sort_by: nil, pagination_params: nil) @@ -910,7 +917,7 @@ module Gitlab def multi_action( user, branch_name:, message:, actions:, author_email: nil, author_name: nil, - start_branch_name: nil, start_sha: nil, start_repository: self, + start_branch_name: nil, start_sha: nil, start_repository: nil, force: false) wrapped_gitaly_errors do @@ -930,6 +937,12 @@ module Gitlab gitaly_repository_client.set_full_path(full_path) end + def full_path + wrapped_gitaly_errors do + gitaly_repository_client.full_path + end + end + def disconnect_alternates wrapped_gitaly_errors do gitaly_repository_client.disconnect_alternates diff --git a/lib/gitlab/git/rugged_impl/use_rugged.rb b/lib/gitlab/git/rugged_impl/use_rugged.rb index dae208e6955..632b4133f2e 100644 --- a/lib/gitlab/git/rugged_impl/use_rugged.rb +++ b/lib/gitlab/git/rugged_impl/use_rugged.rb @@ -10,6 +10,7 @@ module Gitlab # Disable Rugged auto-detect(can_use_disk?) when Puma threads>1 # https://gitlab.com/gitlab-org/gitlab/issues/119326 return false if running_puma_with_multiple_threads? 
+ return false if Feature.enabled?(:skip_rugged_auto_detect, type: :ops) Gitlab::GitalyClient.can_use_disk?(repo.storage) end diff --git a/lib/gitlab/git_access.rb b/lib/gitlab/git_access.rb index 66fd7aaedea..1c5ad650678 100644 --- a/lib/gitlab/git_access.rb +++ b/lib/gitlab/git_access.rb @@ -446,6 +446,7 @@ module Gitlab when Key actor.user when :ci + Gitlab::AppJsonLogger.info(message: 'Actor was :ci', project_id: project.id) nil end end diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb index 35d3ddf5d7f..c5c6ec1cdfa 100644 --- a/lib/gitlab/gitaly_client/operation_service.rb +++ b/lib/gitlab/gitaly_client/operation_service.rb @@ -44,8 +44,26 @@ module Gitlab end Gitlab::Git::Tag.new(@repository, response.tag) - rescue GRPC::FailedPrecondition => e - raise Gitlab::Git::Repository::InvalidRef, e + rescue GRPC::BadStatus => e + detailed_error = GitalyClient.decode_detailed_error(e) + + case detailed_error&.error + when :access_check + access_check_error = detailed_error.access_check + # These messages were returned from internal/allowed API calls + raise Gitlab::Git::PreReceiveError.new(fallback_message: access_check_error.error_message) + when :custom_hook + raise Gitlab::Git::PreReceiveError.new(custom_hook_error_message(detailed_error.custom_hook), + fallback_message: e.details) + when :reference_exists + raise Gitlab::Git::Repository::TagExistsError + else + if e.code == GRPC::Core::StatusCodes::FAILED_PRECONDITION + raise Gitlab::Git::Repository::InvalidRef, e + end + + raise + end end def user_create_branch(branch_name, user, start_point) @@ -394,7 +412,7 @@ module Gitlab response = GitalyClient.call(@repository.storage, :operation_service, :user_commit_files, req_enum, timeout: GitalyClient.long_timeout, - remote_storage: start_repository.storage) + remote_storage: start_repository&.storage) if (pre_receive_error = response.pre_receive_error.presence) raise Gitlab::Git::PreReceiveError, 
pre_receive_error @@ -517,7 +535,7 @@ module Gitlab commit_author_name: encode_binary(author_name), commit_author_email: encode_binary(author_email), start_branch_name: encode_binary(start_branch_name), - start_repository: start_repository.gitaly_repository, + start_repository: start_repository&.gitaly_repository, force: force, start_sha: encode_binary(start_sha), timestamp: Google::Protobuf::Timestamp.new(seconds: Time.now.utc.to_i) diff --git a/lib/gitlab/gitaly_client/ref_service.rb b/lib/gitlab/gitaly_client/ref_service.rb index 31e1406356f..42f9c165610 100644 --- a/lib/gitlab/gitaly_client/ref_service.rb +++ b/lib/gitlab/gitaly_client/ref_service.rb @@ -120,6 +120,16 @@ module Gitlab return unless tag Gitlab::Git::Tag.new(@repository, tag) + rescue GRPC::BadStatus => e + detailed_error = GitalyClient.decode_detailed_error(e) + + case detailed_error&.error + when :tag_not_found + raise Gitlab::Git::UnknownRef, "tag does not exist: #{tag_name}" + else + # When this is not a known structured error we simply re-raise the exception. 
+ raise e + end end def delete_refs(refs: [], except_with_prefixes: []) diff --git a/lib/gitlab/gitaly_client/repository_service.rb b/lib/gitlab/gitaly_client/repository_service.rb index 5adb8d946a0..04d6f92e8d8 100644 --- a/lib/gitlab/gitaly_client/repository_service.rb +++ b/lib/gitlab/gitaly_client/repository_service.rb @@ -271,6 +271,18 @@ module Gitlab nil end + def full_path + response = GitalyClient.call( + @storage, + :repository_service, + :full_path, + Gitaly::FullPathRequest.new(repository: @gitaly_repo), + timeout: GitalyClient.fast_timeout + ) + + response.path.presence + end + def license_short_name request = Gitaly::FindLicenseRequest.new(repository: @gitaly_repo) diff --git a/lib/gitlab/github_import/client.rb b/lib/gitlab/github_import/client.rb index d2495b32800..11a41149274 100644 --- a/lib/gitlab/github_import/client.rb +++ b/lib/gitlab/github_import/client.rb @@ -107,7 +107,7 @@ module Gitlab # # rubocop: disable GitlabSecurity/PublicSend def each_page(method, *args, &block) - return to_enum(__method__, method, *args) unless block_given? + return to_enum(__method__, method, *args) unless block page = if args.last.is_a?(Hash) && args.last[:page] @@ -134,7 +134,7 @@ module Gitlab # method - The method to send to Octokit for querying data. # args - Any arguments to pass to the Octokit method. def each_object(method, *args, &block) - return to_enum(__method__, method, *args) unless block_given? 
+ return to_enum(__method__, method, *args) unless block each_page(method, *args) do |page| page.objects.each do |object| diff --git a/lib/gitlab/github_import/importer/events/base_importer.rb b/lib/gitlab/github_import/importer/events/base_importer.rb new file mode 100644 index 00000000000..9ab1d916d33 --- /dev/null +++ b/lib/gitlab/github_import/importer/events/base_importer.rb @@ -0,0 +1,36 @@ +# frozen_string_literal: true + +module Gitlab + module GithubImport + module Importer + module Events + # Base class for importing issue events during project import from GitHub + class BaseImporter + # project - An instance of `Project`. + # client - An instance of `Gitlab::GithubImport::Client`. + def initialize(project, client) + @project = project + @user_finder = UserFinder.new(project, client) + end + + # issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent`. + def execute(issue_event) + raise NotImplementedError + end + + private + + attr_reader :project, :user_finder + + def author_id(issue_event, author_key: :actor) + user_finder.author_id_for(issue_event, author_key: author_key).first + end + + def issuable_db_id(object) + IssuableFinder.new(project, object).database_id + end + end + end + end + end +end diff --git a/lib/gitlab/github_import/importer/events/changed_assignee.rb b/lib/gitlab/github_import/importer/events/changed_assignee.rb new file mode 100644 index 00000000000..c8f6335e4a8 --- /dev/null +++ b/lib/gitlab/github_import/importer/events/changed_assignee.rb @@ -0,0 +1,52 @@ +# frozen_string_literal: true + +module Gitlab + module GithubImport + module Importer + module Events + class ChangedAssignee < BaseImporter + def execute(issue_event) + assignee_id = author_id(issue_event, author_key: :assignee) + assigner_id = author_id(issue_event, author_key: :assigner) + + note_body = parse_body(issue_event, assigner_id, assignee_id) + + create_note(issue_event, note_body, assigner_id) + end + + private + + def 
create_note(issue_event, note_body, assigner_id) + Note.create!( + system: true, + noteable_type: Issue.name, + noteable_id: issuable_db_id(issue_event), + project: project, + author_id: assigner_id, + note: note_body, + system_note_metadata: SystemNoteMetadata.new( + { + action: "assignee", + created_at: issue_event.created_at, + updated_at: issue_event.created_at + } + ), + created_at: issue_event.created_at, + updated_at: issue_event.created_at + ) + end + + def parse_body(issue_event, assigner_id, assignee_id) + Gitlab::I18n.with_default_locale do + if issue_event.event == "unassigned" + "unassigned #{User.find(assigner_id).to_reference}" + else + "assigned to #{User.find(assignee_id).to_reference}" + end + end + end + end + end + end + end +end diff --git a/lib/gitlab/github_import/importer/events/changed_label.rb b/lib/gitlab/github_import/importer/events/changed_label.rb index 6c408158b02..818a9202745 100644 --- a/lib/gitlab/github_import/importer/events/changed_label.rb +++ b/lib/gitlab/github_import/importer/events/changed_label.rb @@ -4,25 +4,17 @@ module Gitlab module GithubImport module Importer module Events - class ChangedLabel - def initialize(project, user_id) - @project = project - @user_id = user_id - end - - # issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent`. 
+ class ChangedLabel < BaseImporter def execute(issue_event) create_event(issue_event) end private - attr_reader :project, :user_id - def create_event(issue_event) ResourceLabelEvent.create!( - issue_id: issue_event.issue_db_id, - user_id: user_id, + issue_id: issuable_db_id(issue_event), + user_id: author_id(issue_event), label_id: label_finder.id_for(issue_event.label_title), action: action(issue_event.event), created_at: issue_event.created_at diff --git a/lib/gitlab/github_import/importer/events/changed_milestone.rb b/lib/gitlab/github_import/importer/events/changed_milestone.rb new file mode 100644 index 00000000000..3164c041dc3 --- /dev/null +++ b/lib/gitlab/github_import/importer/events/changed_milestone.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +module Gitlab + module GithubImport + module Importer + module Events + class ChangedMilestone < BaseImporter + # GitHub API doesn't provide the historical state of an issue for + # de/milestoned issue events. So we'll assign the default state to + # those events that are imported from GitHub. 
+ DEFAULT_STATE = Issue.available_states[:opened] + + def execute(issue_event) + create_event(issue_event) + end + + private + + def create_event(issue_event) + ResourceMilestoneEvent.create!( + issue_id: issuable_db_id(issue_event), + user_id: author_id(issue_event), + created_at: issue_event.created_at, + milestone_id: project.milestones.find_by_title(issue_event.milestone_title)&.id, + action: action(issue_event.event), + state: DEFAULT_STATE + ) + end + + def action(event_type) + return ResourceMilestoneEvent.actions[:remove] if event_type == 'demilestoned' + + ResourceMilestoneEvent.actions[:add] + end + end + end + end + end +end diff --git a/lib/gitlab/github_import/importer/events/closed.rb b/lib/gitlab/github_import/importer/events/closed.rb index 8b2136c9b24..ca8730d0f27 100644 --- a/lib/gitlab/github_import/importer/events/closed.rb +++ b/lib/gitlab/github_import/importer/events/closed.rb @@ -4,15 +4,7 @@ module Gitlab module GithubImport module Importer module Events - class Closed - attr_reader :project, :user_id - - def initialize(project, user_id) - @project = project - @user_id = user_id - end - - # issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent`. 
+ class Closed < BaseImporter def execute(issue_event) create_event(issue_event) create_state_event(issue_event) @@ -23,10 +15,10 @@ module Gitlab def create_event(issue_event) Event.create!( project_id: project.id, - author_id: user_id, + author_id: author_id(issue_event), action: 'closed', target_type: Issue.name, - target_id: issue_event.issue_db_id, + target_id: issuable_db_id(issue_event), created_at: issue_event.created_at, updated_at: issue_event.created_at ) @@ -34,8 +26,8 @@ module Gitlab def create_state_event(issue_event) ResourceStateEvent.create!( - user_id: user_id, - issue_id: issue_event.issue_db_id, + user_id: author_id(issue_event), + issue_id: issuable_db_id(issue_event), source_commit: issue_event.commit_id, state: 'closed', close_after_error_tracking_resolve: false, diff --git a/lib/gitlab/github_import/importer/events/cross_referenced.rb b/lib/gitlab/github_import/importer/events/cross_referenced.rb index 20b902cfe50..89fc1bdeb09 100644 --- a/lib/gitlab/github_import/importer/events/cross_referenced.rb +++ b/lib/gitlab/github_import/importer/events/cross_referenced.rb @@ -4,15 +4,7 @@ module Gitlab module GithubImport module Importer module Events - class CrossReferenced - attr_reader :project, :user_id - - def initialize(project, user_id) - @project = project - @user_id = user_id - end - - # issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent`. + class CrossReferenced < BaseImporter def execute(issue_event) mentioned_in_record_class = mentioned_in_type(issue_event) mentioned_in_number = issue_event.source.dig(:issue, :number) @@ -21,14 +13,15 @@ module Gitlab ) return if mentioned_in_record.nil? 
+ user_id = author_id(issue_event) note_body = cross_reference_note_content(mentioned_in_record.gfm_reference(project)) - track_activity(mentioned_in_record_class) - create_note(issue_event, note_body) + track_activity(mentioned_in_record_class, user_id) + create_note(issue_event, note_body, user_id) end private - def track_activity(mentioned_in_class) + def track_activity(mentioned_in_class, user_id) return if mentioned_in_class != Issue Gitlab::UsageDataCounters::HLLRedisCounter.track_event( @@ -37,11 +30,11 @@ module Gitlab ) end - def create_note(issue_event, note_body) + def create_note(issue_event, note_body, user_id) Note.create!( system: true, noteable_type: Issue.name, - noteable_id: issue_event.issue_db_id, + noteable_id: issuable_db_id(issue_event), project: project, author_id: user_id, note: note_body, @@ -73,7 +66,7 @@ module Gitlab iid: number, issuable_type: record_class.name ) - Gitlab::GithubImport::IssuableFinder.new(project, mentioned_in_adapter).database_id + issuable_db_id(mentioned_in_adapter) end def cross_reference_note_content(gfm_reference) diff --git a/lib/gitlab/github_import/importer/events/renamed.rb b/lib/gitlab/github_import/importer/events/renamed.rb index 6a11c492210..96d112b04c6 100644 --- a/lib/gitlab/github_import/importer/events/renamed.rb +++ b/lib/gitlab/github_import/importer/events/renamed.rb @@ -4,27 +4,19 @@ module Gitlab module GithubImport module Importer module Events - class Renamed - def initialize(project, user_id) - @project = project - @user_id = user_id - end - - # issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent` + class Renamed < BaseImporter def execute(issue_event) Note.create!(note_params(issue_event)) end private - attr_reader :project, :user_id - def note_params(issue_event) { - noteable_id: issue_event.issue_db_id, + noteable_id: issuable_db_id(issue_event), noteable_type: Issue.name, project_id: project.id, - author_id: user_id, + author_id: author_id(issue_event), note: 
parse_body(issue_event), system: true, created_at: issue_event.created_at, diff --git a/lib/gitlab/github_import/importer/events/reopened.rb b/lib/gitlab/github_import/importer/events/reopened.rb index c0f3802bc46..b75344bf817 100644 --- a/lib/gitlab/github_import/importer/events/reopened.rb +++ b/lib/gitlab/github_import/importer/events/reopened.rb @@ -4,15 +4,7 @@ module Gitlab module GithubImport module Importer module Events - class Reopened - attr_reader :project, :user_id - - def initialize(project, user_id) - @project = project - @user_id = user_id - end - - # issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent`. + class Reopened < BaseImporter def execute(issue_event) create_event(issue_event) create_state_event(issue_event) @@ -23,10 +15,10 @@ module Gitlab def create_event(issue_event) Event.create!( project_id: project.id, - author_id: user_id, + author_id: author_id(issue_event), action: 'reopened', target_type: Issue.name, - target_id: issue_event.issue_db_id, + target_id: issuable_db_id(issue_event), created_at: issue_event.created_at, updated_at: issue_event.created_at ) @@ -34,8 +26,8 @@ module Gitlab def create_state_event(issue_event) ResourceStateEvent.create!( - user_id: user_id, - issue_id: issue_event.issue_db_id, + user_id: author_id(issue_event), + issue_id: issuable_db_id(issue_event), state: 'reopened', created_at: issue_event.created_at ) diff --git a/lib/gitlab/github_import/importer/issue_event_importer.rb b/lib/gitlab/github_import/importer/issue_event_importer.rb index e451af61ec3..ef456e56ee1 100644 --- a/lib/gitlab/github_import/importer/issue_event_importer.rb +++ b/lib/gitlab/github_import/importer/issue_event_importer.rb @@ -4,7 +4,7 @@ module Gitlab module GithubImport module Importer class IssueEventImporter - attr_reader :issue_event, :project, :client, :user_finder + attr_reader :issue_event, :project, :client # issue_event - An instance of `Gitlab::GithubImport::Representation::IssueEvent`. 
# project - An instance of `Project`. @@ -13,26 +13,16 @@ module Gitlab @issue_event = issue_event @project = project @client = client - @user_finder = UserFinder.new(project, client) end + # TODO: Add MergeRequest events support + # https://gitlab.com/groups/gitlab-org/-/epics/7673 def execute - case issue_event.event - when 'closed' - Gitlab::GithubImport::Importer::Events::Closed.new(project, author_id) - .execute(issue_event) - when 'reopened' - Gitlab::GithubImport::Importer::Events::Reopened.new(project, author_id) - .execute(issue_event) - when 'labeled', 'unlabeled' - Gitlab::GithubImport::Importer::Events::ChangedLabel.new(project, author_id) - .execute(issue_event) - when 'renamed' - Gitlab::GithubImport::Importer::Events::Renamed.new(project, author_id) - .execute(issue_event) - when 'cross-referenced' - Gitlab::GithubImport::Importer::Events::CrossReferenced.new(project, author_id) - .execute(issue_event) + return if issue_event.issuable_type == 'MergeRequest' + + importer = event_importer_class(issue_event) + if importer + importer.new(project, client).execute(issue_event) else Gitlab::GithubImport::Logger.debug( message: 'UNSUPPORTED_EVENT_TYPE', @@ -43,9 +33,23 @@ module Gitlab private - def author_id - id, _status = user_finder.author_id_for(issue_event, author_key: :actor) - id + def event_importer_class(issue_event) + case issue_event.event + when 'closed' + Gitlab::GithubImport::Importer::Events::Closed + when 'reopened' + Gitlab::GithubImport::Importer::Events::Reopened + when 'labeled', 'unlabeled' + Gitlab::GithubImport::Importer::Events::ChangedLabel + when 'renamed' + Gitlab::GithubImport::Importer::Events::Renamed + when 'milestoned', 'demilestoned' + Gitlab::GithubImport::Importer::Events::ChangedMilestone + when 'cross-referenced' + Gitlab::GithubImport::Importer::Events::CrossReferenced + when 'assigned', 'unassigned' + Gitlab::GithubImport::Importer::Events::ChangedAssignee + end end end end diff --git 
a/lib/gitlab/github_import/importer/issue_events_importer.rb b/lib/gitlab/github_import/importer/issue_events_importer.rb new file mode 100644 index 00000000000..71dd99f91f9 --- /dev/null +++ b/lib/gitlab/github_import/importer/issue_events_importer.rb @@ -0,0 +1,35 @@ +# frozen_string_literal: true + +module Gitlab + module GithubImport + module Importer + class IssueEventsImporter + include ParallelScheduling + + def importer_class + IssueEventImporter + end + + def representation_class + Representation::IssueEvent + end + + def sidekiq_worker_class + ImportIssueEventWorker + end + + def object_type + :issue_event + end + + def collection_method + :repository_issue_events + end + + def id_for_already_imported_cache(event) + event.id + end + end + end + end +end diff --git a/lib/gitlab/github_import/importer/issue_importer.rb b/lib/gitlab/github_import/importer/issue_importer.rb index e7d41856b04..d964bae3dd2 100644 --- a/lib/gitlab/github_import/importer/issue_importer.rb +++ b/lib/gitlab/github_import/importer/issue_importer.rb @@ -56,7 +56,8 @@ module Gitlab milestone_id: milestone_finder.id_for(issue), state_id: ::Issue.available_states[issue.state], created_at: issue.created_at, - updated_at: issue.updated_at + updated_at: issue.updated_at, + work_item_type_id: issue.work_item_type_id } insert_and_return_id(attributes, project.issues) diff --git a/lib/gitlab/github_import/importer/issues_importer.rb b/lib/gitlab/github_import/importer/issues_importer.rb index 6cc1a61b332..21d9ce8cd2d 100644 --- a/lib/gitlab/github_import/importer/issues_importer.rb +++ b/lib/gitlab/github_import/importer/issues_importer.rb @@ -6,6 +6,12 @@ module Gitlab class IssuesImporter include ParallelScheduling + def initialize(project, client, parallel: true) + super + + @work_item_type_id = ::WorkItems::Type.default_issue_type.id + end + def importer_class IssueAndLabelLinksImporter end @@ -33,6 +39,12 @@ module Gitlab def collection_options { state: 'all', sort: 'created', direction: 
'asc' } end + + private + + def additional_object_data + { work_item_type_id: @work_item_type_id } + end end end end diff --git a/lib/gitlab/github_import/importer/releases_importer.rb b/lib/gitlab/github_import/importer/releases_importer.rb index 7241e1ef703..51d364772d2 100644 --- a/lib/gitlab/github_import/importer/releases_importer.rb +++ b/lib/gitlab/github_import/importer/releases_importer.rb @@ -27,9 +27,10 @@ module Gitlab def build(release) existing_tags.add(release.tag_name) - build_hash = { + { name: release.name, tag: release.tag_name, + author_id: fetch_author_id(release), description: description_for(release), created_at: release.created_at, updated_at: release.created_at, @@ -37,12 +38,6 @@ module Gitlab released_at: release.published_at || Time.current, project_id: project.id } - - if Feature.enabled?(:import_release_authors_from_github, project) - build_hash[:author_id] = fetch_author_id(release) - end - - build_hash end def each_release diff --git a/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer.rb b/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer.rb index 45bbc25e637..8e4015acbbc 100644 --- a/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer.rb +++ b/lib/gitlab/github_import/importer/single_endpoint_issue_events_importer.rb @@ -18,13 +18,16 @@ module Gitlab { project: project.id, collection: collection_method } end + # In single endpoint there is no issue info to which associated related + # To make it possible to identify issue in separated worker we need to patch + # Sawyer instances here with issue number def each_associated(parent_record, associated) compose_associated_id!(parent_record, associated) return if already_imported?(associated) Gitlab::GithubImport::ObjectCounter.increment(project, object_type, :fetched) - associated.issue_db_id = parent_record.id + associated.issue = { 'number' => parent_record.iid } yield(associated) mark_as_imported(associated) diff --git 
a/lib/gitlab/github_import/issuable_finder.rb b/lib/gitlab/github_import/issuable_finder.rb index da205ebd345..e7a1b7b3368 100644 --- a/lib/gitlab/github_import/issuable_finder.rb +++ b/lib/gitlab/github_import/issuable_finder.rb @@ -69,6 +69,8 @@ module Gitlab object.noteable_id elsif object.respond_to?(:iid) object.iid + elsif object.respond_to?(:issuable_id) + object.issuable_id else raise( TypeError, diff --git a/lib/gitlab/github_import/parallel_scheduling.rb b/lib/gitlab/github_import/parallel_scheduling.rb index ab20b372d53..a8c18c74d24 100644 --- a/lib/gitlab/github_import/parallel_scheduling.rb +++ b/lib/gitlab/github_import/parallel_scheduling.rb @@ -63,7 +63,7 @@ module Gitlab # Imports all the objects in sequence in the current thread. def sequential_import each_object_to_import do |object| - repr = representation_class.from_api_response(object) + repr = representation_class.from_api_response(object, additional_object_data) importer_class.new(repr, project, client).execute end @@ -72,26 +72,9 @@ module Gitlab # Imports all objects in parallel by scheduling a Sidekiq job for every # individual object. def parallel_import - if parallel_import_batch.present? - spread_parallel_import - else - parallel_import_deprecated - end - end - - def parallel_import_deprecated - waiter = JobWaiter.new - - each_object_to_import do |object| - repr = representation_class.from_api_response(object) - - sidekiq_worker_class - .perform_async(project.id, repr.to_hash, waiter.key) + raise 'Batch settings must be defined for parallel import' if parallel_import_batch.blank? 
- waiter.jobs_remaining += 1 - end - - waiter + spread_parallel_import end def spread_parallel_import @@ -100,7 +83,7 @@ module Gitlab import_arguments = [] each_object_to_import do |object| - repr = representation_class.from_api_response(object) + repr = representation_class.from_api_response(object, additional_object_data) import_arguments << [project.id, repr.to_hash, waiter.key] @@ -223,6 +206,10 @@ module Gitlab private + def additional_object_data + {} + end + def info(project_id, extra = {}) Logger.info(log_attributes(project_id, extra)) end diff --git a/lib/gitlab/github_import/representation/diff_note.rb b/lib/gitlab/github_import/representation/diff_note.rb index 883abef9bdb..64aa6ea5cb4 100644 --- a/lib/gitlab/github_import/representation/diff_note.rb +++ b/lib/gitlab/github_import/representation/diff_note.rb @@ -20,7 +20,7 @@ module Gitlab # Builds a diff note from a GitHub API response. # # note - An instance of `Sawyer::Resource` containing the note details. - def self.from_api_response(note) + def self.from_api_response(note, additional_data = {}) matches = note.html_url.match(NOTEABLE_ID_REGEX) unless matches diff --git a/lib/gitlab/github_import/representation/issue.rb b/lib/gitlab/github_import/representation/issue.rb index db4a8188c03..9d457ec1c2f 100644 --- a/lib/gitlab/github_import/representation/issue.rb +++ b/lib/gitlab/github_import/representation/issue.rb @@ -11,13 +11,13 @@ module Gitlab expose_attribute :iid, :title, :description, :milestone_number, :created_at, :updated_at, :state, :assignees, - :label_names, :author + :label_names, :author, :work_item_type_id # Builds an issue from a GitHub API response. # # issue - An instance of `Sawyer::Resource` containing the issue # details. 
- def self.from_api_response(issue) + def self.from_api_response(issue, additional_data = {}) user = if issue.user Representation::User.from_api_response(issue.user) @@ -36,7 +36,8 @@ module Gitlab author: user, created_at: issue.created_at, updated_at: issue.updated_at, - pull_request: issue.pull_request ? true : false + pull_request: issue.pull_request ? true : false, + work_item_type_id: additional_data[:work_item_type_id] } new(hash) diff --git a/lib/gitlab/github_import/representation/issue_event.rb b/lib/gitlab/github_import/representation/issue_event.rb index 9016338db3b..67a5df73a97 100644 --- a/lib/gitlab/github_import/representation/issue_event.rb +++ b/lib/gitlab/github_import/representation/issue_event.rb @@ -10,34 +10,7 @@ module Gitlab attr_reader :attributes expose_attribute :id, :actor, :event, :commit_id, :label_title, :old_title, :new_title, - :source, :created_at - expose_attribute :issue_db_id # set in SingleEndpointIssueEventsImporter#each_associated - - # Builds a event from a GitHub API response. - # - # event - An instance of `Sawyer::Resource` containing the event details. - def self.from_api_response(event) - new( - id: event.id, - actor: event.actor && Representation::User.from_api_response(event.actor), - event: event.event, - commit_id: event.commit_id, - label_title: event.label && event.label[:name], - old_title: event.rename && event.rename[:from], - new_title: event.rename && event.rename[:to], - source: event.source, - issue_db_id: event.issue_db_id, - created_at: event.created_at - ) - end - - # Builds a event using a Hash that was built from a JSON payload. - def self.from_json_hash(raw_hash) - hash = Representation.symbolize_hash(raw_hash) - hash[:actor] &&= Representation::User.from_json_hash(hash[:actor]) - - new(hash) - end + :milestone_title, :issue, :source, :assignee, :assigner, :created_at # attributes - A Hash containing the event details. The keys of this # Hash (and any nested hashes) must be symbols. 
@@ -48,6 +21,60 @@ module Gitlab def github_identifiers { id: id } end + + def issuable_type + issue && issue[:pull_request].present? ? 'MergeRequest' : 'Issue' + end + + def issuable_id + issue && issue[:number] + end + + class << self + # Builds an event from a GitHub API response. + # + # event - An instance of `Sawyer::Resource` containing the event details. + def from_api_response(event, additional_data = {}) + new( + id: event.id, + actor: user_representation(event.actor), + event: event.event, + commit_id: event.commit_id, + label_title: event.label && event.label[:name], + old_title: event.rename && event.rename[:from], + new_title: event.rename && event.rename[:to], + milestone_title: event.milestone && event.milestone[:title], + issue: event.issue&.to_h&.symbolize_keys, + source: event.source, + assignee: user_representation(event.assignee), + assigner: user_representation(event.assigner), + created_at: event.created_at + ) + end + + # Builds an event using a Hash that was built from a JSON payload. 
+ def from_json_hash(raw_hash) + hash = Representation.symbolize_hash(raw_hash) + hash[:actor] = user_representation(hash[:actor], source: :hash) + hash[:assignee] = user_representation(hash[:assignee], source: :hash) + hash[:assigner] = user_representation(hash[:assigner], source: :hash) + + new(hash) + end + + private + + def user_representation(data, source: :api_response) + return unless data + + case source + when :api_response + Representation::User.from_api_response(data) + when :hash + Representation::User.from_json_hash(data) + end + end + end end end end diff --git a/lib/gitlab/github_import/representation/lfs_object.rb b/lib/gitlab/github_import/representation/lfs_object.rb index 18737bfcde3..cd614db2161 100644 --- a/lib/gitlab/github_import/representation/lfs_object.rb +++ b/lib/gitlab/github_import/representation/lfs_object.rb @@ -12,7 +12,7 @@ module Gitlab expose_attribute :oid, :link, :size # Builds a lfs_object - def self.from_api_response(lfs_object) + def self.from_api_response(lfs_object, additional_data = {}) new( oid: lfs_object.oid, link: lfs_object.link, diff --git a/lib/gitlab/github_import/representation/note.rb b/lib/gitlab/github_import/representation/note.rb index bbf20b7e9e6..ae56c370b19 100644 --- a/lib/gitlab/github_import/representation/note.rb +++ b/lib/gitlab/github_import/representation/note.rb @@ -17,7 +17,7 @@ module Gitlab # Builds a note from a GitHub API response. # # note - An instance of `Sawyer::Resource` containing the note details. 
- def self.from_api_response(note) + def self.from_api_response(note, additional_data = {}) matches = note.html_url.match(NOTEABLE_TYPE_REGEX) if !matches || !matches[:type] diff --git a/lib/gitlab/github_import/representation/pull_request.rb b/lib/gitlab/github_import/representation/pull_request.rb index 82bcdee8b2b..2adac2af502 100644 --- a/lib/gitlab/github_import/representation/pull_request.rb +++ b/lib/gitlab/github_import/representation/pull_request.rb @@ -18,7 +18,7 @@ module Gitlab # Builds a PR from a GitHub API response. # # issue - An instance of `Sawyer::Resource` containing the PR details. - def self.from_api_response(pr) + def self.from_api_response(pr, additional_data = {}) assignee = Representation::User.from_api_response(pr.assignee) if pr.assignee user = Representation::User.from_api_response(pr.user) if pr.user merged_by = Representation::User.from_api_response(pr.merged_by) if pr.merged_by diff --git a/lib/gitlab/github_import/representation/pull_request_review.rb b/lib/gitlab/github_import/representation/pull_request_review.rb index 70c1e51ffdd..8a7ecf0c588 100644 --- a/lib/gitlab/github_import/representation/pull_request_review.rb +++ b/lib/gitlab/github_import/representation/pull_request_review.rb @@ -11,7 +11,7 @@ module Gitlab expose_attribute :author, :note, :review_type, :submitted_at, :merge_request_id, :review_id - def self.from_api_response(review) + def self.from_api_response(review, additional_data = {}) user = Representation::User.from_api_response(review.user) if review.user new( diff --git a/lib/gitlab/github_import/representation/user.rb b/lib/gitlab/github_import/representation/user.rb index fac8920a3f2..4ef916cc41c 100644 --- a/lib/gitlab/github_import/representation/user.rb +++ b/lib/gitlab/github_import/representation/user.rb @@ -14,7 +14,7 @@ module Gitlab # Builds a user from a GitHub API response. # # user - An instance of `Sawyer::Resource` containing the user details. 
- def self.from_api_response(user) + def self.from_api_response(user, additional_data = {}) new( id: user.id, login: user.login diff --git a/lib/gitlab/github_import/user_finder.rb b/lib/gitlab/github_import/user_finder.rb index efaa2ce3002..6d6a00d260d 100644 --- a/lib/gitlab/github_import/user_finder.rb +++ b/lib/gitlab/github_import/user_finder.rb @@ -40,7 +40,17 @@ module Gitlab # If the object has no author ID we'll use the ID of the GitLab ghost # user. def author_id_for(object, author_key: :author) - user_info = author_key == :actor ? object&.actor : object&.author + user_info = case author_key + when :actor + object&.actor + when :assignee + object&.assignee + when :assigner + object&.assigner + else + object&.author + end + id = user_info ? user_id_for(user_info) : GithubImport.ghost_user_id if id @@ -148,7 +158,7 @@ module Gitlab # rubocop: disable CodeReuse/ActiveRecord def query_id_for_github_email(email) - User.by_any_email(email).pluck(:id).first + User.by_any_email(email).pick(:id) end # rubocop: enable CodeReuse/ActiveRecord diff --git a/lib/gitlab/global_id/deprecations.rb b/lib/gitlab/global_id/deprecations.rb index 2753e2b8372..b68aac8f862 100644 --- a/lib/gitlab/global_id/deprecations.rb +++ b/lib/gitlab/global_id/deprecations.rb @@ -3,46 +3,27 @@ module Gitlab module GlobalId module Deprecations - Deprecation = Struct.new(:old_model_name, :new_model_name, :milestone, keyword_init: true) - # Contains the deprecations in place. # Example: # # DEPRECATIONS = [ - # Deprecation.new(old_model_name: 'PrometheusService', new_model_name: 'Integrations::Prometheus', milestone: '14.1') + # Gitlab::Graphql::DeprecationsBase::NameDeprecation.new(old_name: 'PrometheusService', new_name: 'Integrations::Prometheus', milestone: '14.1') # ].freeze DEPRECATIONS = [ # This works around an accidentally released argument named as `"EEIterationID"` in 7000489db. 
- Deprecation.new(old_model_name: 'EEIteration', new_model_name: 'Iteration', milestone: '13.3'), - Deprecation.new(old_model_name: 'PrometheusService', new_model_name: 'Integrations::Prometheus', milestone: '14.1') + Gitlab::Graphql::DeprecationsBase::NameDeprecation.new( + old_name: 'EEIteration', new_name: 'Iteration', milestone: '13.3' + ), + Gitlab::Graphql::DeprecationsBase::NameDeprecation.new( + old_name: 'PrometheusService', new_name: 'Integrations::Prometheus', milestone: '14.1' + ) ].freeze - # Maps of the DEPRECATIONS Hash for quick access. - OLD_NAME_MAP = DEPRECATIONS.index_by(&:old_model_name).freeze - NEW_NAME_MAP = DEPRECATIONS.index_by(&:new_model_name).freeze - OLD_GRAPHQL_NAME_MAP = DEPRECATIONS.index_by do |d| - Types::GlobalIDType.model_name_to_graphql_name(d.old_model_name) - end.freeze - - def self.deprecated?(old_model_name) - OLD_NAME_MAP.key?(old_model_name) - end - - def self.deprecation_for(old_model_name) - OLD_NAME_MAP[old_model_name] + def self.map_graphql_name(model_name) + Types::GlobalIDType.model_name_to_graphql_name(model_name) end - def self.deprecation_by(new_model_name) - NEW_NAME_MAP[new_model_name] - end - - # Returns the new `graphql_name` (Type#graphql_name) of a deprecated GID, - # or the `graphql_name` argument given if no deprecation applies. 
- def self.apply_to_graphql_name(graphql_name) - return graphql_name unless deprecation = OLD_GRAPHQL_NAME_MAP[graphql_name] - - Types::GlobalIDType.model_name_to_graphql_name(deprecation.new_model_name) - end + include Gitlab::Graphql::DeprecationsBase end end end diff --git a/lib/gitlab/grape_logging/loggers/token_logger.rb b/lib/gitlab/grape_logging/loggers/token_logger.rb new file mode 100644 index 00000000000..a7c1b42ec96 --- /dev/null +++ b/lib/gitlab/grape_logging/loggers/token_logger.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +module Gitlab + module GrapeLogging + module Loggers + class TokenLogger < ::GrapeLogging::Loggers::Base + def parameters(request, _) + params = request.env[::API::Helpers::API_TOKEN_ENV] + + return {} unless params + + params.slice(:token_type, :token_id) + end + end + end + end +end diff --git a/lib/gitlab/graphql/deprecation.rb b/lib/gitlab/graphql/deprecation.rb index d30751fe46e..9b17962f9ec 100644 --- a/lib/gitlab/graphql/deprecation.rb +++ b/lib/gitlab/graphql/deprecation.rb @@ -21,8 +21,18 @@ module Gitlab validate :milestone_is_string validate :reason_known_or_string - def self.parse(options) - new(**options) if options + def self.parse(alpha: nil, deprecated: nil) + options = alpha || deprecated + return unless options + + if alpha + raise ArgumentError, '`alpha` and `deprecated` arguments cannot be passed at the same time' \ + if deprecated + + options[:reason] = :alpha + end + + new(**options) end def initialize(reason: nil, milestone: nil, replacement: nil) @@ -84,6 +94,10 @@ module Gitlab ].compact.join(' ') end + def alpha? + reason == REASON_ALPHA + end + private attr_reader :reason, :milestone, :replacement @@ -117,7 +131,7 @@ module Gitlab # Retruns 'Introduced in <milestone>' for :alpha deprecations. # Formatted to markdown or plain format. def changed_in_milestone(format: :plain) - verb = if reason == REASON_ALPHA + verb = if alpha? 
'Introduced' else 'Deprecated' diff --git a/lib/gitlab/graphql/deprecations_base.rb b/lib/gitlab/graphql/deprecations_base.rb new file mode 100644 index 00000000000..2ee14620907 --- /dev/null +++ b/lib/gitlab/graphql/deprecations_base.rb @@ -0,0 +1,48 @@ +# frozen_string_literal: true + +module Gitlab + module Graphql + # include DeprecationsBase at the end of the target module + module DeprecationsBase + NameDeprecation = Struct.new(:old_name, :new_name, :milestone, keyword_init: true) + + def self.included(klass) + klass.extend(ClassMethods) + + klass.const_set('OLD_GRAPHQL_NAME_MAP', klass::DEPRECATIONS.index_by do |d| + klass.map_graphql_name(d.old_name) + end.freeze) + klass.const_set('OLD_NAME_MAP', klass::DEPRECATIONS.index_by(&:old_name).freeze) + klass.const_set('NEW_NAME_MAP', klass::DEPRECATIONS.index_by(&:new_name).freeze) + end + + module ClassMethods + def deprecated?(old_name) + self::OLD_NAME_MAP.key?(old_name) + end + + def deprecation_for(old_name) + self::OLD_NAME_MAP[old_name] + end + + def deprecation_by(new_name) + self::NEW_NAME_MAP[new_name] + end + + # Returns the new `graphql_name` (Type#graphql_name) of a deprecated GID, + # or the `graphql_name` argument given if no deprecation applies. 
+ def apply_to_graphql_name(graphql_name) + return graphql_name unless deprecation = self::OLD_GRAPHQL_NAME_MAP[graphql_name] + + self.map_graphql_name(deprecation.new_name) + end + + private + + def map_graphql_name(name) + raise NotImplementedError, "Implement `#{__method__}` in #{self.name}" + end + end + end + end +end diff --git a/lib/gitlab/graphql/pagination/keyset/conditions/base_condition.rb b/lib/gitlab/graphql/pagination/keyset/conditions/base_condition.rb deleted file mode 100644 index 6645dac36fa..00000000000 --- a/lib/gitlab/graphql/pagination/keyset/conditions/base_condition.rb +++ /dev/null @@ -1,62 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Graphql - module Pagination - module Keyset - module Conditions - class BaseCondition - # @param [Arel::Table] arel_table for the relation being ordered - # @param [Array<OrderInfo>] order_list of extracted orderings - # @param [Array] values from the decoded cursor - # @param [Array<String>] operators determining sort comparison - # @param [Symbol] before_or_after indicates whether we want - # items :before the cursor or :after the cursor - def initialize(arel_table, order_list, values, operators, before_or_after) - @arel_table = arel_table - @order_list = order_list - @values = values - @operators = operators - @before_or_after = before_or_after - - @before_or_after = :after unless [:after, :before].include?(@before_or_after) - end - - def build - raise NotImplementedError - end - - private - - attr_reader :arel_table, :order_list, :values, :operators, :before_or_after - - def table_condition(order_info, value, operator) - if order_info.named_function - target = order_info.named_function - - if target.try(:name)&.casecmp('lower') == 0 - value = value&.downcase - end - else - target = arel_table[order_info.attribute_name] - end - - case operator - when '>' - target.gt(value) - when '<' - target.lt(value) - when '=' - target.eq(value) - when 'is_null' - target.eq(nil) - when 'is_not_null' - 
target.not_eq(nil) - end - end - end - end - end - end - end -end diff --git a/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition.rb b/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition.rb deleted file mode 100644 index ec70f5c5a24..00000000000 --- a/lib/gitlab/graphql/pagination/keyset/conditions/not_null_condition.rb +++ /dev/null @@ -1,55 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Graphql - module Pagination - module Keyset - module Conditions - class NotNullCondition < BaseCondition - def build - conditions = [first_attribute_condition] - - # If there is only one order field, we can assume it - # does not contain NULLs, and don't need additional - # conditions - unless order_list.count == 1 - conditions << [second_attribute_condition, final_condition] - end - - conditions.join - end - - private - - # ex: "(relative_position > 23)" - def first_attribute_condition - <<~SQL - (#{table_condition(order_list.first, values.first, operators.first).to_sql}) - SQL - end - - # ex: " OR (relative_position = 23 AND id > 500)" - def second_attribute_condition - <<~SQL - OR ( - #{table_condition(order_list.first, values.first, '=').to_sql} - AND - #{table_condition(order_list[1], values[1], operators[1]).to_sql} - ) - SQL - end - - # ex: " OR (relative_position IS NULL)" - def final_condition - if before_or_after == :after - <<~SQL - OR (#{table_condition(order_list.first, nil, 'is_null').to_sql}) - SQL - end - end - end - end - end - end - end -end diff --git a/lib/gitlab/graphql/pagination/keyset/conditions/null_condition.rb b/lib/gitlab/graphql/pagination/keyset/conditions/null_condition.rb deleted file mode 100644 index 1aae1020e79..00000000000 --- a/lib/gitlab/graphql/pagination/keyset/conditions/null_condition.rb +++ /dev/null @@ -1,39 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Graphql - module Pagination - module Keyset - module Conditions - class NullCondition < BaseCondition - def build - 
[first_attribute_condition, final_condition].join - end - - private - - # ex: "(relative_position IS NULL AND id > 500)" - def first_attribute_condition - <<~SQL - ( - #{table_condition(order_list.first, nil, 'is_null').to_sql} - AND - #{table_condition(order_list[1], values[1], operators[1]).to_sql} - ) - SQL - end - - # ex: " OR (relative_position IS NOT NULL)" - def final_condition - if before_or_after == :before - <<~SQL - OR (#{table_condition(order_list.first, nil, 'is_not_null').to_sql}) - SQL - end - end - end - end - end - end - end -end diff --git a/lib/gitlab/graphql/pagination/keyset/connection.rb b/lib/gitlab/graphql/pagination/keyset/connection.rb index 3e119a39e6d..b074c273996 100644 --- a/lib/gitlab/graphql/pagination/keyset/connection.rb +++ b/lib/gitlab/graphql/pagination/keyset/connection.rb @@ -29,7 +29,6 @@ module Gitlab include Gitlab::Utils::StrongMemoize include ::Gitlab::Graphql::ConnectionCollectionMethods prepend ::Gitlab::Graphql::ConnectionRedaction - prepend GenericKeysetPagination # rubocop: disable Naming/PredicateName # https://relay.dev/graphql/connections.htm#sec-undefined.PageInfo.Fields @@ -58,19 +57,13 @@ module Gitlab def has_next_page strong_memoize(:has_next_page) do if before - # If `before` is specified, that points to a specific record, - # even if it's the last one. Since we're asking for `before`, - # then the specific record we're pointing to is in the - # next page true elsif first case sliced_nodes when Array sliced_nodes.size > limit_value else - # If we count the number of requested items plus one (`limit_value + 1`), - # then if we get `limit_value + 1` then we know there is a next page - relation_count(set_limit(sliced_nodes, limit_value + 1)) == limit_value + 1 + sliced_nodes.limit(1).offset(limit_value).exists? 
# rubocop: disable CodeReuse/ActiveRecord end else false @@ -80,20 +73,15 @@ module Gitlab # rubocop: enable Naming/PredicateName def cursor_for(node) - encoded_json_from_ordering(node) + order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(items) + encode(order.cursor_attributes_for_node(node).to_json) end def sliced_nodes - @sliced_nodes ||= - begin - OrderInfo.validate_ordering(ordered_items, order_list) unless loaded?(ordered_items) - - sliced = ordered_items - sliced = slice_nodes(sliced, before, :before) if before.present? - sliced = slice_nodes(sliced, after, :after) if after.present? - - sliced - end + sliced = ordered_items + sliced = slice_nodes(sliced, before, :before) if before.present? + sliced = slice_nodes(sliced, after, :after) if after.present? + sliced end def nodes @@ -104,6 +92,20 @@ module Gitlab @nodes ||= limited_nodes.to_a end + def items + original_items = super + return original_items if Gitlab::Pagination::Keyset::Order.keyset_aware?(original_items) + + strong_memoize(:keyset_pagination_items) do + rebuilt_items_with_keyset_order, success = + Gitlab::Pagination::Keyset::SimpleOrderBuilder.build(original_items) + + raise(Gitlab::Pagination::Keyset::UnsupportedScopeOrder) unless success + + rebuilt_items_with_keyset_order + end + end + private # Apply `first` and `last` to `sliced_nodes` @@ -129,11 +131,11 @@ module Gitlab # rubocop: disable CodeReuse/ActiveRecord def slice_nodes(sliced, encoded_cursor, before_or_after) - decoded_cursor = ordering_from_encoded_json(encoded_cursor) - builder = QueryBuilder.new(arel_table, order_list, decoded_cursor, before_or_after) - ordering = builder.conditions + order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(sliced) + order = order.reversed_order if before_or_after == :before - sliced.where(*ordering).where.not(id: decoded_cursor['id']) + decoded_cursor = ordering_from_encoded_json(encoded_cursor) + order.apply_cursor_conditions(sliced, decoded_cursor) end # 
rubocop: enable CodeReuse/ActiveRecord @@ -157,57 +159,10 @@ module Gitlab raise ArgumentError, 'Relation must have a primary key' end - list = OrderInfo.build_order_list(items) - - if loaded?(items) && !before.present? && !after.present? - @order_list = list.presence || [OrderInfo.new(items.primary_key)] - - # already sorted, or trivially sorted - next items if list.present? || items.size <= 1 - - pkey = items.primary_key.to_sym - next items.sort_by { |item| item[pkey] }.reverse - end - - # ensure there is a primary key ordering - if list&.last&.attribute_name != items.primary_key - items.order(arel_table[items.primary_key].desc) # rubocop: disable CodeReuse/ActiveRecord - else - items - end - end - end - - def order_list - strong_memoize(:order_list) do - OrderInfo.build_order_list(ordered_items) + items end end - def arel_table - items.arel_table - end - - # Storing the current order values in the cursor allows us to - # make an intelligent decision on handling NULL values. - # Otherwise we would either need to fetch the record first, - # or fetch it in the SQL, significantly complicating it. 
- def encoded_json_from_ordering(node) - ordering = { 'id' => node[:id].to_s } - - order_list.each do |field| - field_name = field.try(:attribute_name) || field - field_value = node[field_name] - ordering[field_name] = if field_value.is_a?(Time) - field_value.to_s(:inspect) - else - field_value.to_s - end - end - - encode(ordering.to_json) - end - def ordering_from_encoded_json(cursor) Gitlab::Json.parse(decode(cursor)) rescue JSON::ParserError diff --git a/lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb b/lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb deleted file mode 100644 index 9beb40ddd7e..00000000000 --- a/lib/gitlab/graphql/pagination/keyset/generic_keyset_pagination.rb +++ /dev/null @@ -1,98 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Graphql - module Pagination - module Keyset - # https://gitlab.com/gitlab-org/gitlab/-/issues/334973 - # Use the generic keyset implementation if the given ActiveRecord scope supports it. - # Note: this module is temporary, at some point it will be merged with Keyset::Connection - module GenericKeysetPagination - extend ActiveSupport::Concern - - # rubocop: disable Naming/PredicateName - # rubocop: disable CodeReuse/ActiveRecord - def has_next_page - return super unless Gitlab::Pagination::Keyset::Order.keyset_aware?(items) - - strong_memoize(:generic_keyset_pagination_has_next_page) do - if before - true - elsif first - case sliced_nodes - when Array - sliced_nodes.size > limit_value - else - sliced_nodes.limit(1).offset(limit_value).exists? - end - else - false - end - end - end - - # rubocop: enable CodeReuse/ActiveRecord - def ordered_items - raise ArgumentError, 'Relation must have a primary key' unless items.primary_key.present? 
- - return super unless Gitlab::Pagination::Keyset::Order.keyset_aware?(items) - - items - end - - def cursor_for(node) - return super unless Gitlab::Pagination::Keyset::Order.keyset_aware?(items) - - order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(items) - encode(order.cursor_attributes_for_node(node).to_json) - end - - def slice_nodes(sliced, encoded_cursor, before_or_after) - return super unless Gitlab::Pagination::Keyset::Order.keyset_aware?(sliced) - - order = Gitlab::Pagination::Keyset::Order.extract_keyset_order_object(sliced) - order = order.reversed_order if before_or_after == :before - - decoded_cursor = ordering_from_encoded_json(encoded_cursor) - order.apply_cursor_conditions(sliced, decoded_cursor) - end - - def sliced_nodes - return super unless Gitlab::Pagination::Keyset::Order.keyset_aware?(items) - - sliced = ordered_items - sliced = slice_nodes(sliced, before, :before) if before.present? - sliced = slice_nodes(sliced, after, :after) if after.present? 
- sliced - end - - def items - original_items = super - return original_items if Feature.disabled?(:new_graphql_keyset_pagination) || Gitlab::Pagination::Keyset::Order.keyset_aware?(original_items) - - strong_memoize(:generic_keyset_pagination_items) do - rebuilt_items_with_keyset_order, success = Gitlab::Pagination::Keyset::SimpleOrderBuilder.build(original_items) - - if success - rebuilt_items_with_keyset_order - else - if original_items.is_a?(ActiveRecord::Relation) - old_keyset_pagination_usage.increment({ model: original_items.model.to_s }) - end - - original_items - end - end - end - - def old_keyset_pagination_usage - @old_keyset_pagination_usage ||= Gitlab::Metrics.counter( - :old_keyset_pagination_usage, - 'The number of times the old keyset pagination code was used' - ) - end - end - end - end - end -end diff --git a/lib/gitlab/graphql/pagination/keyset/order_info.rb b/lib/gitlab/graphql/pagination/keyset/order_info.rb deleted file mode 100644 index 57e85ebe7f6..00000000000 --- a/lib/gitlab/graphql/pagination/keyset/order_info.rb +++ /dev/null @@ -1,124 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Graphql - module Pagination - module Keyset - class OrderInfo - attr_reader :attribute_name, :sort_direction, :named_function - - def initialize(order_value) - @attribute_name, @sort_direction, @named_function = - if order_value.is_a?(String) - extract_nulls_last_order(order_value) - else - extract_attribute_values(order_value) - end - end - - def operator_for(before_or_after) - case before_or_after - when :before - sort_direction == :asc ? '<' : '>' - when :after - sort_direction == :asc ? '>' : '<' - end - end - - # Only allow specific node types - def self.build_order_list(relation) - order_list = relation.order_values.select do |value| - supported_order_value?(value) - end - - order_list.map { |info| OrderInfo.new(info) } - end - - def self.validate_ordering(relation, order_list) - if order_list.empty? 
- raise ArgumentError, 'A minimum of 1 ordering field is required' - end - - if order_list.count > 2 - # Keep in mind an order clause for primary key is added if one is not present - # lib/gitlab/graphql/pagination/keyset/connection.rb:97 - raise ArgumentError, 'A maximum of 2 ordering fields are allowed' - end - - # make sure the last ordering field is non-nullable - attribute_name = order_list.last&.attribute_name - - if relation.columns_hash[attribute_name].null - raise ArgumentError, "Column `#{attribute_name}` must not allow NULL" - end - - if order_list.last.attribute_name != relation.primary_key - raise ArgumentError, "Last ordering field must be the primary key, `#{relation.primary_key}`" - end - end - - def self.supported_order_value?(order_value) - return true if order_value.is_a?(Arel::Nodes::Ascending) || order_value.is_a?(Arel::Nodes::Descending) - return false unless order_value.is_a?(String) - - tokens = order_value.downcase.split - - tokens.last(2) == %w(nulls last) && tokens.count == 4 - end - - private - - def extract_nulls_last_order(order_value) - tokens = order_value.downcase.split - - column_reference = tokens.first - sort_direction = tokens[1] == 'asc' ? :asc : :desc - - # Handles the case when the order value is coming from another table. 
- # Example: table_name.column_name - # Query the value using the fully qualified column name: pass table_name.column_name as the named_function - if fully_qualified_column_reference?(column_reference) - [column_reference, sort_direction, Arel.sql(column_reference)] - else - [column_reference, sort_direction, nil] - end - end - - # Example: table_name.column_name - def fully_qualified_column_reference?(attribute) - attribute.to_s.count('.') == 1 - end - - def extract_attribute_values(order_value) - if ordering_by_lower?(order_value) - [order_value.expr.expressions[0].name.to_s, order_value.direction, order_value.expr] - elsif ordering_by_case?(order_value) - ['case_order_value', order_value.direction, order_value.expr] - elsif ordering_by_array_position?(order_value) - ['array_position', order_value.direction, order_value.expr] - else - [order_value.expr.name, order_value.direction, nil] - end - end - - # determine if ordering using LOWER, eg. "ORDER BY LOWER(boards.name)" - def ordering_by_lower?(order_value) - order_value.expr.is_a?(Arel::Nodes::NamedFunction) && order_value.expr&.name&.downcase == 'lower' - end - - # determine if ordering using ARRAY_POSITION, eg. 
"ORDER BY ARRAY_POSITION(Array[4,3,1,2]::smallint, state)" - def ordering_by_array_position?(order_value) - order_value.expr.is_a?(Arel::Nodes::NamedFunction) && order_value.expr&.name&.downcase == 'array_position' - end - - # determine if ordering using CASE - def ordering_by_case?(order_value) - order_value.expr.is_a?(Arel::Nodes::Case) - end - end - end - end - end -end - -Gitlab::Graphql::Pagination::Keyset::OrderInfo.prepend_mod_with('Gitlab::Graphql::Pagination::Keyset::OrderInfo') diff --git a/lib/gitlab/graphql/pagination/keyset/query_builder.rb b/lib/gitlab/graphql/pagination/keyset/query_builder.rb deleted file mode 100644 index a2f53ae83dd..00000000000 --- a/lib/gitlab/graphql/pagination/keyset/query_builder.rb +++ /dev/null @@ -1,73 +0,0 @@ -# frozen_string_literal: true - -module Gitlab - module Graphql - module Pagination - module Keyset - class QueryBuilder - def initialize(arel_table, order_list, decoded_cursor, before_or_after) - @arel_table = arel_table - @order_list = order_list - @decoded_cursor = decoded_cursor - @before_or_after = before_or_after - - if order_list.empty? - raise ArgumentError, 'No ordering scopes have been supplied' - end - end - - # Based on whether the main field we're ordering on is NULL in the - # cursor, we can more easily target our query condition. - # We assume that the last ordering field is unique, meaning - # it will not contain NULLs. - # We currently only support two ordering fields. 
- # - # Example of the conditions for - # relation: Issue.order(relative_position: :asc).order(id: :asc) - # after cursor: relative_position: 1500, id: 500 - # - # when cursor[relative_position] is not NULL - # - # ("issues"."relative_position" > 1500) - # OR ( - # "issues"."relative_position" = 1500 - # AND - # "issues"."id" > 500 - # ) - # OR ("issues"."relative_position" IS NULL) - # - # when cursor[relative_position] is NULL - # - # "issues"."relative_position" IS NULL - # AND - # "issues"."id" > 500 - # - def conditions - attr_values = order_list.map do |field| - name = field.try(:attribute_name) || field - decoded_cursor[name] - end - - if order_list.count == 1 && attr_values.first.nil? - raise Gitlab::Graphql::Errors::ArgumentError, 'Before/after cursor invalid: `nil` was provided as only sortable value' - end - - if order_list.count == 1 || attr_values.first.present? - Keyset::Conditions::NotNullCondition.new(arel_table, order_list, attr_values, operators, before_or_after).build - else - Keyset::Conditions::NullCondition.new(arel_table, order_list, attr_values, operators, before_or_after).build - end - end - - private - - attr_reader :arel_table, :order_list, :decoded_cursor, :before_or_after - - def operators - order_list.map { |field| field.operator_for(before_or_after) } - end - end - end - end - end -end diff --git a/lib/gitlab/graphql/type_name_deprecations.rb b/lib/gitlab/graphql/type_name_deprecations.rb new file mode 100644 index 00000000000..c27ad1d54f5 --- /dev/null +++ b/lib/gitlab/graphql/type_name_deprecations.rb @@ -0,0 +1,27 @@ +# frozen_string_literal: true + +module Gitlab + module Graphql + module TypeNameDeprecations + # Contains the deprecations in place. 
+ # Example: + # + # DEPRECATIONS = [ + # Gitlab::Graphql::DeprecationsBase::NameDeprecation.new( + # old_name: 'CiRunnerUpgradeStatusType', new_name: 'CiRunnerUpgradeStatus', milestone: '15.3' + # ) + # ].freeze + DEPRECATIONS = [ + Gitlab::Graphql::DeprecationsBase::NameDeprecation.new( + old_name: 'CiRunnerUpgradeStatusType', new_name: 'CiRunnerUpgradeStatus', milestone: '15.3' + ) + ].freeze + + def self.map_graphql_name(name) + name + end + + include Gitlab::Graphql::DeprecationsBase + end + end +end diff --git a/lib/gitlab/hook_data/merge_request_builder.rb b/lib/gitlab/hook_data/merge_request_builder.rb index b4f90715293..65c623c5d7d 100644 --- a/lib/gitlab/hook_data/merge_request_builder.rb +++ b/lib/gitlab/hook_data/merge_request_builder.rb @@ -22,6 +22,7 @@ module Gitlab merge_user_id merge_when_pipeline_succeeds milestone_id + reviewer_ids source_branch source_project_id state_id @@ -38,6 +39,7 @@ module Gitlab %i[ assignees labels + reviewers total_time_spent time_change ].freeze @@ -60,6 +62,7 @@ module Gitlab human_time_estimate: merge_request.human_time_estimate, assignee_ids: merge_request.assignee_ids, assignee_id: merge_request.assignee_ids.first, # This key is deprecated + reviewer_ids: merge_request.reviewer_ids, labels: merge_request.labels_hook_attrs, state: merge_request.state, # This key is deprecated blocking_discussions_resolved: merge_request.mergeable_discussions_state?, diff --git a/lib/gitlab/http_io.rb b/lib/gitlab/http_io.rb index bd3ac139168..25b86fbf22f 100644 --- a/lib/gitlab/http_io.rb +++ b/lib/gitlab/http_io.rb @@ -153,7 +153,7 @@ module Gitlab http.request(request) end - raise FailedToGetChunkError unless response.code == '200' || response.code == '206' + raise FailedToGetChunkError, "Unexpected response code: #{response.code}" unless response.code == '200' || response.code == '206' @chunk = response.body.force_encoding(Encoding::BINARY) @chunk_range = response.content_range diff --git a/lib/gitlab/i18n.rb b/lib/gitlab/i18n.rb 
index cad0e773b05..30465ff5f74 100644 --- a/lib/gitlab/i18n.rb +++ b/lib/gitlab/i18n.rb @@ -44,30 +44,30 @@ module Gitlab TRANSLATION_LEVELS = { 'bg' => 0, 'cs_CZ' => 0, - 'da_DK' => 40, - 'de' => 15, + 'da_DK' => 39, + 'de' => 17, 'en' => 100, 'eo' => 0, - 'es' => 37, + 'es' => 38, 'fil_PH' => 0, 'fr' => 11, 'gl_ES' => 0, 'id_ID' => 0, 'it' => 1, - 'ja' => 33, - 'ko' => 11, + 'ja' => 32, + 'ko' => 12, 'nb_NO' => 26, 'nl_NL' => 0, 'pl_PL' => 4, 'pt_BR' => 55, 'ro_RO' => 100, - 'ru' => 28, - 'si_LK' => 11, + 'ru' => 27, + 'si_LK' => 10, 'tr_TR' => 12, - 'uk' => 49, + 'uk' => 50, 'zh_CN' => 99, - 'zh_HK' => 2, - 'zh_TW' => 4 + 'zh_HK' => 1, + 'zh_TW' => 100 }.freeze private_constant :TRANSLATION_LEVELS diff --git a/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy.rb b/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy.rb index e38496ecf67..34e75755dec 100644 --- a/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy.rb +++ b/lib/gitlab/import_export/after_export_strategies/base_after_export_strategy.rb @@ -12,12 +12,13 @@ module Gitlab private - attr_reader :project, :current_user, :lock_file + attr_reader :project, :current_user, :lock_file, :logger public def initialize(attributes = {}) @options = attributes + @logger = Gitlab::Export::Logger.build end def method_missing(method, *args) @@ -43,6 +44,10 @@ module Gitlab true rescue StandardError => e + payload = { message: "After export strategy failed" } + Gitlab::ExceptionLogFormatter.format!(e, payload) + log_error(payload) + project.import_export_shared.error(e) false ensure @@ -108,6 +113,18 @@ module Gitlab def log_validation_errors errors.full_messages.each { |msg| project.import_export_shared.add_error_message(msg) } end + + def log_info(params) + logger.info(log_default_params.merge(params)) + end + + def log_error(params) + logger.error(log_default_params.merge(params)) + end + + def log_default_params + { project_name: project.name, 
project_id: project.id } + end end end end diff --git a/lib/gitlab/import_export/after_export_strategies/move_file_strategy.rb b/lib/gitlab/import_export/after_export_strategies/move_file_strategy.rb index 2e3136936f8..bddbe7862cb 100644 --- a/lib/gitlab/import_export/after_export_strategies/move_file_strategy.rb +++ b/lib/gitlab/import_export/after_export_strategies/move_file_strategy.rb @@ -5,6 +5,7 @@ module Gitlab module AfterExportStrategies class MoveFileStrategy < BaseAfterExportStrategy def initialize(archive_path:) + super @archive_path = archive_path end diff --git a/lib/gitlab/import_export/after_export_strategies/web_upload_strategy.rb b/lib/gitlab/import_export/after_export_strategies/web_upload_strategy.rb index 78608a946de..6c5fba37d7b 100644 --- a/lib/gitlab/import_export/after_export_strategies/web_upload_strategy.rb +++ b/lib/gitlab/import_export/after_export_strategies/web_upload_strategy.rb @@ -23,7 +23,17 @@ module Gitlab protected def strategy_execute - handle_response_error(send_file) + log_info(message: "Started uploading project", export_size: export_size) + + upload_duration = Benchmark.realtime do + if Feature.enabled?(:import_export_web_upload_stream) && !project.export_file.file_storage? 
+ upload_project_as_remote_stream + else + handle_response_error(send_file) + end + end + + log_info(message: "Finished uploading project", export_size: export_size, upload_duration: upload_duration) end def handle_response_error(response) @@ -44,8 +54,22 @@ module Gitlab export_file.close if export_file end + def upload_project_as_remote_stream + Gitlab::ImportExport::RemoteStreamUpload.new( + download_url: project.export_file.url, + upload_url: url, + options: { + upload_method: http_method.downcase.to_sym, + upload_content_type: 'application/gzip' + }).execute + rescue Gitlab::ImportExport::RemoteStreamUpload::StreamError => e + log_error(message: e.message, response_body: e.response_body.truncate(3000)) + + raise + end + def export_file - project.export_file.open + @export_file ||= project.export_file.open end def send_file_options diff --git a/lib/gitlab/import_export/after_export_strategy_builder.rb b/lib/gitlab/import_export/after_export_strategy_builder.rb index d7b30f46903..90618922dfe 100644 --- a/lib/gitlab/import_export/after_export_strategy_builder.rb +++ b/lib/gitlab/import_export/after_export_strategy_builder.rb @@ -9,7 +9,11 @@ module Gitlab return default_strategy.new unless strategy_klass attributes ||= {} - klass = strategy_klass.constantize rescue nil + klass = begin + strategy_klass.constantize + rescue StandardError + nil + end unless klass && klass < AfterExportStrategies::BaseAfterExportStrategy raise StrategyNotFoundError, "Strategy #{strategy_klass} not found" diff --git a/lib/gitlab/import_export/base/relation_factory.rb b/lib/gitlab/import_export/base/relation_factory.rb index 53dd6f8cd55..1cbfcbdb595 100644 --- a/lib/gitlab/import_export/base/relation_factory.rb +++ b/lib/gitlab/import_export/base/relation_factory.rb @@ -126,12 +126,19 @@ module Gitlab end end + # When an assignee (or any other listed association) did not exist in the members mapper, the importer is + # assigned. We only need to assign each user once. 
def remove_duplicate_assignees - return unless @relation_hash['issue_assignees'] + associations = %w[issue_assignees merge_request_assignees merge_request_reviewers approvals] - # When an assignee did not exist in the members mapper, the importer is - # assigned. We only need to assign each user once. - @relation_hash['issue_assignees'].uniq!(&:user_id) + associations.each do |association| + next unless @relation_hash.key?(association) + next unless @relation_hash[association].is_a?(Array) + next if @relation_hash[association].empty? + + @relation_hash[association].select! { |record| record.respond_to?(:user_id) } + @relation_hash[association].uniq!(&:user_id) + end end def generate_imported_object diff --git a/lib/gitlab/import_export/base/relation_object_saver.rb b/lib/gitlab/import_export/base/relation_object_saver.rb index d0fae2cbb95..ea989487ebd 100644 --- a/lib/gitlab/import_export/base/relation_object_saver.rb +++ b/lib/gitlab/import_export/base/relation_object_saver.rb @@ -15,7 +15,7 @@ module Gitlab include Gitlab::Utils::StrongMemoize BATCH_SIZE = 100 - MIN_RECORDS_SIZE = 5 + MIN_RECORDS_SIZE = 1 # @param relation_object [Object] Object of a project/group, e.g. an issue # @param relation_key [String] Name of the object association to group/project, e.g. 
:issues diff --git a/lib/gitlab/import_export/decompressed_archive_size_validator.rb b/lib/gitlab/import_export/decompressed_archive_size_validator.rb index a185eb4df1c..c98dcf7b848 100644 --- a/lib/gitlab/import_export/decompressed_archive_size_validator.rb +++ b/lib/gitlab/import_export/decompressed_archive_size_validator.rb @@ -28,25 +28,26 @@ module Gitlab private def validate - pgrp = nil + pgrps = nil valid_archive = true validate_archive_path Timeout.timeout(TIMEOUT_LIMIT) do - stdin, stdout, stderr, wait_thr = Open3.popen3(command, pgroup: true) - stdin.close + stderr_r, stderr_w = IO.pipe + stdout, wait_threads = Open3.pipeline_r(*command, pgroup: true, err: stderr_w ) # When validation is performed on a small archive (e.g. 100 bytes) # `wait_thr` finishes before we can get process group id. Do not # raise exception in this scenario. - pgrp = begin + pgrps = wait_threads.map do |wait_thr| Process.getpgid(wait_thr[:pid]) rescue Errno::ESRCH nil end + pgrps.compact! - status = wait_thr.value + status = wait_threads.last.value if status.success? 
result = stdout.readline @@ -64,20 +65,21 @@ module Gitlab ensure stdout.close - stderr.close + stderr_w.close + stderr_r.close end valid_archive rescue Timeout::Error log_error('Timeout reached during archive decompression') - Process.kill(-1, pgrp) if pgrp + pgrps.each { |pgrp| Process.kill(-1, pgrp) } if pgrps false rescue StandardError => e log_error(e.message) - Process.kill(-1, pgrp) if pgrp + pgrps.each { |pgrp| Process.kill(-1, pgrp) } if pgrps false end @@ -91,7 +93,7 @@ module Gitlab end def command - "gzip -dc #{@archive_path} | wc -c" + [['gzip', '-dc', @archive_path], ['wc', '-c']] end def log_error(error) diff --git a/lib/gitlab/import_export/group/relation_tree_restorer.rb b/lib/gitlab/import_export/group/relation_tree_restorer.rb index 4b28dd831fc..fab677bd772 100644 --- a/lib/gitlab/import_export/group/relation_tree_restorer.rb +++ b/lib/gitlab/import_export/group/relation_tree_restorer.rb @@ -89,7 +89,7 @@ module Gitlab end def save_relation_object(relation_object, relation_key, relation_definition, relation_index) - if Feature.enabled?(:import_relation_object_persistence) && relation_object.new_record? + if relation_object.new_record? 
Gitlab::ImportExport::Base::RelationObjectSaver.new( relation_object: relation_object, relation_key: relation_key, diff --git a/lib/gitlab/import_export/json/streaming_serializer.rb b/lib/gitlab/import_export/json/streaming_serializer.rb index 59396c6bad2..78f43f79072 100644 --- a/lib/gitlab/import_export/json/streaming_serializer.rb +++ b/lib/gitlab/import_export/json/streaming_serializer.rb @@ -18,11 +18,12 @@ module Gitlab end end - def initialize(exportable, relations_schema, json_writer, exportable_path:) + def initialize(exportable, relations_schema, json_writer, exportable_path:, logger: Gitlab::Export::Logger) @exportable = exportable @exportable_path = exportable_path @relations_schema = relations_schema @json_writer = json_writer + @logger = logger end def execute @@ -36,6 +37,8 @@ module Gitlab end def serialize_root(exportable_path = @exportable_path) + log_relation_export('root') + attributes = exportable.as_json( relations_schema.merge(include: nil, preloads: nil, unsafe: true)) @@ -60,9 +63,11 @@ module Gitlab private - attr_reader :json_writer, :relations_schema, :exportable + attr_reader :json_writer, :relations_schema, :exportable, :logger def serialize_many_relations(key, records, options) + log_relation_export(key, records.size) + enumerator = Enumerator.new do |items| key_preloads = preloads&.dig(key) @@ -106,6 +111,8 @@ module Gitlab end def serialize_many_each(key, records, options) + log_relation_export(key, records.size) + enumerator = Enumerator.new do |items| records.each do |record| items << Raw.new(record.to_json(options)) @@ -116,6 +123,8 @@ module Gitlab end def serialize_single_relation(key, record, options) + log_relation_export(key) + json = Raw.new(record.to_json(options)) json_writer.write_relation(@exportable_path, key, json) @@ -186,6 +195,18 @@ module Gitlab record.merge_request_diff&.remove_cached_external_diff end + + def log_base_data + log = { importer: 'Import/Export' } + 
log.merge!(Gitlab::ImportExport::LogUtil.exportable_to_log_payload(exportable)) + log + end + + def log_relation_export(relation, size = nil) + message = "Exporting #{relation} relation" + message += ". Number of records to export: #{size}" if size + logger.info(message: message, **log_base_data) + end end end end diff --git a/lib/gitlab/import_export/log_util.rb b/lib/gitlab/import_export/log_util.rb new file mode 100644 index 00000000000..d3a3dce47ba --- /dev/null +++ b/lib/gitlab/import_export/log_util.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module Gitlab + module ImportExport + class LogUtil + def self.exportable_to_log_payload(exportable) + attribute_base_name = exportable.class.name.underscore + + return {} unless %w[project group].include?(attribute_base_name) + + {}.tap do |log| + log[:"#{attribute_base_name}_id"] = exportable.id + log[:"#{attribute_base_name}_name"] = exportable.name + log[:"#{attribute_base_name}_path"] = exportable.full_path + end.compact + end + end + end +end diff --git a/lib/gitlab/import_export/project/import_export.yml b/lib/gitlab/import_export/project/import_export.yml index 50ff6146174..c5b8f3fd35b 100644 --- a/lib/gitlab/import_export/project/import_export.yml +++ b/lib/gitlab/import_export/project/import_export.yml @@ -53,8 +53,11 @@ tree: - project_members: - :user - merge_requests: + - :approvals - :metrics - :award_emoji + - :merge_request_assignees + - :merge_request_reviewers - notes: - :author - :award_emoji @@ -120,6 +123,10 @@ included_attributes: - :username author: - :name + approvals: + - :user_id + - :created_at + - :updated_at ci_cd_settings: - :group_runners_enabled - :runner_token_expiration_interval @@ -284,6 +291,9 @@ included_attributes: - :security_and_compliance_access_level - :container_registry_access_level - :package_registry_access_level + - :environments_access_level + - :feature_flags_access_level + - :releases_access_level prometheus_metrics: - :created_at - :updated_at @@ -328,7 +338,6 
@@ included_attributes: - :source_branch - :source_project_id - :author_id - - :assignee_id - :title - :created_at - :updated_at @@ -587,6 +596,14 @@ included_attributes: - :author_id issue_assignees: - :user_id + merge_request_assignees: + - :user_id + - :created_at + - :state + merge_request_reviewers: + - :user_id + - :created_at + - :state sentry_issue: - :sentry_issue_identifier zoom_meetings: @@ -640,7 +657,6 @@ included_attributes: - :auto_cancel_pending_pipelines - :autoclose_referenced_issues - :build_allow_git_fetch - - :build_coverage_regex - :build_timeout - :delete_error - :description @@ -686,6 +702,9 @@ included_attributes: - :security_and_compliance_access_level - :container_registry_access_level - :package_registry_access_level + - :environments_access_level + - :feature_flags_access_level + - :releases_access_level - :allow_merge_on_skipped_pipeline - :auto_devops_deploy_strategy - :auto_devops_enabled @@ -762,6 +781,9 @@ excluded_attributes: - :repository_size_limit - :external_webhook_token - :incident_management_issuable_escalation_statuses + approvals: + - :id + - :merge_request_id namespaces: - :runners_token - :runners_token_encrypted diff --git a/lib/gitlab/import_export/project/relation_saver.rb b/lib/gitlab/import_export/project/relation_saver.rb new file mode 100644 index 00000000000..b40827e36f8 --- /dev/null +++ b/lib/gitlab/import_export/project/relation_saver.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +module Gitlab + module ImportExport + module Project + class RelationSaver + def initialize(project:, shared:, relation:) + @project = project + @relation = relation + @shared = shared + end + + def save + if root_relation? 
+ serializer.serialize_root + else + serializer.serialize_relation(relation_schema) + end + + true + rescue StandardError => e + shared.error(e) + false + end + + private + + attr_reader :project, :relation, :shared + + def serializer + @serializer ||= ::Gitlab::ImportExport::Json::StreamingSerializer.new( + project, + reader.project_tree, + json_writer, + exportable_path: 'project' + ) + end + + def root_relation? + relation == Projects::ImportExport::RelationExport::ROOT_RELATION + end + + def relation_schema + reader.project_tree[:include].find { |include| include[relation.to_sym] } + end + + def reader + @reader ||= ::Gitlab::ImportExport::Reader.new(shared: shared) + end + + def json_writer + @json_writer ||= ::Gitlab::ImportExport::Json::NdjsonWriter.new(shared.export_path) + end + end + end + end +end diff --git a/lib/gitlab/import_export/project/tree_saver.rb b/lib/gitlab/import_export/project/tree_saver.rb index 05dcfa5282c..1b54e4b975e 100644 --- a/lib/gitlab/import_export/project/tree_saver.rb +++ b/lib/gitlab/import_export/project/tree_saver.rb @@ -8,7 +8,7 @@ module Gitlab attr_reader :full_path - def initialize(project:, current_user:, shared:, params: {}, logger: Gitlab::Import::Logger) + def initialize(project:, current_user:, shared:, params: {}, logger: Gitlab::Export::Logger) @params = params @project = project @current_user = current_user @@ -49,7 +49,8 @@ module Gitlab exportable, reader.project_tree, json_writer, - exportable_path: "project" + exportable_path: "project", + logger: @logger ) Retriable.retriable(on: Net::OpenTimeout, on_retry: on_retry) do diff --git a/lib/gitlab/import_export/remote_stream_upload.rb b/lib/gitlab/import_export/remote_stream_upload.rb new file mode 100644 index 00000000000..f3bd241c0bd --- /dev/null +++ b/lib/gitlab/import_export/remote_stream_upload.rb @@ -0,0 +1,117 @@ +# frozen_string_literal: true + +# This class downloads a file from one URL and uploads it to another URL +# without having to save the file on 
the disk and loading the whole file in +# memory. The download and upload are performed in chunks size of +# `buffer_size`. A chunk is downloaded, then uploaded, then a next chunk is +# downloaded and uploaded. This repeats until all the file is processed. + +module Gitlab + module ImportExport + class RemoteStreamUpload + def initialize(download_url:, upload_url:, options: {}) + @download_url = download_url + @upload_url = upload_url + @upload_method = options[:upload_method] || :post + @upload_content_type = options[:upload_content_type] || 'application/gzip' + end + + def execute + receive_data(download_url) do |response, chunks| + send_data(upload_url, response.content_length, chunks) do |response| + if response.code != '200' + raise StreamError.new("Invalid response code while uploading file. Code: #{response.code}", response.body) + end + end + end + end + class StreamError < StandardError + attr_reader :response_body + + def initialize(message, response_body = '') + super(message) + @response_body = response_body + end + end + class ChunkStream + DEFAULT_BUFFER_SIZE = 128.kilobytes + + def initialize(chunks) + @chunks = chunks + @last_chunk = nil + @end_of_chunks = false + end + + def read(n1 = nil, n2 = nil) + ensure_chunk&.read(n1, n2) + end + + private + + def ensure_chunk + return @last_chunk if @last_chunk && !@last_chunk.eof? 
+ return if @end_of_chunks + + @last_chunk = read_next_chunk + end + + def read_next_chunk + next_chunk = StringIO.new + + begin + next_chunk.write(@chunks.next) until next_chunk.size > DEFAULT_BUFFER_SIZE + rescue StopIteration + @end_of_chunks = true + end + + next_chunk.rewind + + next_chunk + end + end + + private + + attr_reader :download_url, :upload_url, :upload_method, :upload_content_type, :logger + + def receive_data(uri) + http = Gitlab::HTTPConnectionAdapter.new(URI(uri), {}).connection + + http.start do + request = Net::HTTP::Get.new(uri) + http.request(request) do |response| + if response.code == '200' + yield(response, response.enum_for(:read_body)) + else + raise StreamError.new( + "Invalid response code while downloading file. Code: #{response.code}", + response.body + ) + end + end + end + end + + def send_data(uri, content_length, chunks) + http = Gitlab::HTTPConnectionAdapter.new(URI(uri), {}).connection + + http.start do + request = upload_request_class(upload_method).new(uri) + request.body_stream = ChunkStream.new(chunks) + request.content_length = content_length + request.content_type = upload_content_type + + http.request(request) do |response| + yield(response) + end + end + end + + def upload_request_class(upload_method) + return Net::HTTP::Put if upload_method == :put + + Net::HTTP::Post + end + end + end +end diff --git a/lib/gitlab/import_export/shared.rb b/lib/gitlab/import_export/shared.rb index 5cb1c1f8981..6337842255d 100644 --- a/lib/gitlab/import_export/shared.rb +++ b/lib/gitlab/import_export/shared.rb @@ -95,14 +95,9 @@ module Gitlab end def log_base_data - log = { - importer: 'Import/Export', - exportable_id: @exportable&.id, - exportable_path: @exportable&.full_path - } - + log = { importer: 'Import/Export' } + log.merge!(Gitlab::ImportExport::LogUtil.exportable_to_log_payload(@exportable)) log[:import_jid] = @exportable&.import_state&.jid if exportable_type == 'Project' - log end diff --git 
a/lib/gitlab/instrumentation/global_search_api.rb b/lib/gitlab/instrumentation/global_search_api.rb new file mode 100644 index 00000000000..ea2f5702364 --- /dev/null +++ b/lib/gitlab/instrumentation/global_search_api.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +module Gitlab + module Instrumentation + class GlobalSearchApi + TYPE = 'meta.search.type' + LEVEL = 'meta.search.level' + SCOPE = 'meta.search.scope' + SEARCH_DURATION_S = :global_search_duration_s + + def self.get_type + ::Gitlab::SafeRequestStore[TYPE] + end + + def self.get_level + ::Gitlab::SafeRequestStore[LEVEL] + end + + def self.get_scope + ::Gitlab::SafeRequestStore[SCOPE] + end + + def self.get_search_duration_s + ::Gitlab::SafeRequestStore[SEARCH_DURATION_S] + end + + def self.payload + { + TYPE => get_type, + LEVEL => get_level, + SCOPE => get_scope, + SEARCH_DURATION_S => get_search_duration_s + }.compact + end + + def self.set_information(type:, level:, scope:, search_duration_s:) + if ::Gitlab::SafeRequestStore.active? 
+ ::Gitlab::SafeRequestStore[TYPE] = type + ::Gitlab::SafeRequestStore[LEVEL] = level + ::Gitlab::SafeRequestStore[SCOPE] = scope + ::Gitlab::SafeRequestStore[SEARCH_DURATION_S] = search_duration_s + end + end + end + end +end diff --git a/lib/gitlab/instrumentation_helper.rb b/lib/gitlab/instrumentation_helper.rb index b8d8deb3418..b8a2567b775 100644 --- a/lib/gitlab/instrumentation_helper.rb +++ b/lib/gitlab/instrumentation_helper.rb @@ -34,6 +34,7 @@ module Gitlab instrument_worker_id(payload) instrument_uploads(payload) instrument_rate_limiting_gates(payload) + instrument_global_search_api(payload) end def instrument_gitaly(payload) @@ -131,6 +132,10 @@ module Gitlab payload.merge!(::Gitlab::Instrumentation::RateLimitingGates.payload) end + def instrument_global_search_api(payload) + payload.merge!(::Gitlab::Instrumentation::GlobalSearchApi.payload) + end + # Returns the queuing duration for a Sidekiq job in seconds, as a float, if the # `enqueued_at` field or `created_at` field is available. # diff --git a/lib/gitlab/issues/rebalancing/state.rb b/lib/gitlab/issues/rebalancing/state.rb index 3d3fd9419b2..abb50281f7a 100644 --- a/lib/gitlab/issues/rebalancing/state.rb +++ b/lib/gitlab/issues/rebalancing/state.rb @@ -38,10 +38,10 @@ module Gitlab def rebalance_in_progress? 
is_running = case rebalanced_container_type when NAMESPACE - namespace_ids = self.class.current_rebalancing_containers.map {|string| string.split("#{NAMESPACE}/").second.to_i }.compact + namespace_ids = self.class.current_rebalancing_containers.map { |string| string.split("#{NAMESPACE}/").second.to_i }.compact namespace_ids.include?(root_namespace.id) when PROJECT - project_ids = self.class.current_rebalancing_containers.map {|string| string.split("#{PROJECT}/").second.to_i }.compact + project_ids = self.class.current_rebalancing_containers.map { |string| string.split("#{PROJECT}/").second.to_i }.compact project_ids.include?(projects.take.id) # rubocop:disable CodeReuse/ActiveRecord else false @@ -90,11 +90,11 @@ module Gitlab end def issue_count - @issue_count ||= with_redis { |redis| redis.zcard(issue_ids_key)} + @issue_count ||= with_redis { |redis| redis.zcard(issue_ids_key) } end def remove_current_project_id_cache - with_redis { |redis| redis.del(current_project_key)} + with_redis { |redis| redis.del(current_project_key) } end def refresh_keys_expiration diff --git a/lib/gitlab/jira_import/issues_importer.rb b/lib/gitlab/jira_import/issues_importer.rb index f1ead57c911..25dffcbe0ee 100644 --- a/lib/gitlab/jira_import/issues_importer.rb +++ b/lib/gitlab/jira_import/issues_importer.rb @@ -7,6 +7,10 @@ module Gitlab # see https://jira.atlassian.com/browse/JRACLOUD-67570 # We set it to 1000 in case they change their mind. 
BATCH_SIZE = 1000 + JIRA_IMPORT_THRESHOLD = 100_000 + JIRA_IMPORT_PAUSE_LIMIT = 50_000 + + RetriesExceededError = Class.new(RuntimeError) attr_reader :imported_items_cache_key, :start_at, :job_waiter @@ -66,6 +70,10 @@ module Gitlab @issue_type_id, { iid: next_iid } ).execute + + # Pause the importer to allow the import to catch up and cache to drain + pause_jira_issue_importer if jira_import_issue_worker.queue_size > JIRA_IMPORT_THRESHOLD + Gitlab::JiraImport::ImportIssueWorker.perform_async(project.id, jira_issue.id, issue_attrs, job_waiter.key) job_waiter.jobs_remaining += 1 @@ -89,6 +97,27 @@ module Gitlab job_waiter end + def jira_import_issue_worker + @_jira_import_issue_worker ||= Gitlab::JiraImport::ImportIssueWorker + end + + def pause_jira_issue_importer + # Wait for import workers to drop below 50K in the iterations of the timeout + # timeout - Set to 5 seconds. + # Time to process 100K jobs is currently ~14 seconds. + # Source: https://github.com/mperham/sidekiq#performance + # retries - Set to 10 times to avoid indefinitely pause. + # Raises an error if the queue does not reduce below the limit after 10 tries. 
+ + retries = 10 + while retries > 0 && jira_import_issue_worker.queue_size >= JIRA_IMPORT_PAUSE_LIMIT + job_waiter.wait(5) + retries -= 1 + end + + raise RetriesExceededError, 'Retry failed after 10 attempts' if retries == 0 + end + def fetch_issues(start_at) client.Issue.jql("PROJECT='#{jira_project_key}' ORDER BY created ASC", { max_results: BATCH_SIZE, start_at: start_at }) end diff --git a/lib/gitlab/job_waiter.rb b/lib/gitlab/job_waiter.rb index 2cede524cac..52234b50a1f 100644 --- a/lib/gitlab/job_waiter.rb +++ b/lib/gitlab/job_waiter.rb @@ -35,7 +35,7 @@ module Gitlab end def self.key?(key) - key.is_a?(String) && key =~ /\A#{KEY_PREFIX}:\h{8}-\h{4}-\h{4}-\h{4}-\h{12}\z/ + key.is_a?(String) && key =~ /\A#{KEY_PREFIX}:\h{8}-\h{4}-\h{4}-\h{4}-\h{12}\z/o end attr_reader :key, :finished, :worker_label diff --git a/lib/gitlab/kubernetes.rb b/lib/gitlab/kubernetes.rb index 22bd00751bc..15163bd4a57 100644 --- a/lib/gitlab/kubernetes.rb +++ b/lib/gitlab/kubernetes.rb @@ -63,7 +63,11 @@ module Gitlab return unless containers.present? && pod_name.present? && phase == "Running" - created_at = DateTime.parse(metadata["creationTimestamp"]) rescue nil + created_at = begin + DateTime.parse(metadata["creationTimestamp"]) + rescue StandardError + nil + end containers.map do |container| { diff --git a/lib/gitlab/legacy_github_import/client.rb b/lib/gitlab/legacy_github_import/client.rb index 7a9dae3a3de..7d78c8dee25 100644 --- a/lib/gitlab/legacy_github_import/client.rb +++ b/lib/gitlab/legacy_github_import/client.rb @@ -136,7 +136,7 @@ module Gitlab last_response = api.last_response - if block_given? + if block yield data # api.last_response could change while we're yielding (e.g. fetching labels for each PR) # so we cache our own last response diff --git a/lib/gitlab/mail_room.rb b/lib/gitlab/mail_room.rb index ef5ca56a13b..f7cd28df5c9 100644 --- a/lib/gitlab/mail_room.rb +++ b/lib/gitlab/mail_room.rb @@ -33,11 +33,11 @@ module Gitlab # fetched from YML config file. 
MAILBOX_SPECIFIC_CONFIGS = { incoming_email: { - queue: 'email_receiver', + queue: 'default', worker: 'EmailReceiverWorker' }, service_desk_email: { - queue: 'service_desk_email_receiver', + queue: 'default', worker: 'ServiceDeskEmailReceiverWorker' } }.freeze diff --git a/lib/gitlab/memory/jemalloc.rb b/lib/gitlab/memory/jemalloc.rb index 454c54569de..7163a70a5cb 100644 --- a/lib/gitlab/memory/jemalloc.rb +++ b/lib/gitlab/memory/jemalloc.rb @@ -14,6 +14,8 @@ module Gitlab STATS_DEFAULT_FORMAT = :json + FILENAME_PREFIX = 'jemalloc_stats' + # Return jemalloc stats as a string. def stats(format: STATS_DEFAULT_FORMAT) verify_format!(format) @@ -23,16 +25,24 @@ module Gitlab end end - # Write jemalloc stats to the given directory. - def dump_stats(path:, format: STATS_DEFAULT_FORMAT) + # Write jemalloc stats to the given directory + # @param [String] path Directory path the dump will be put into + # @param [String] format `json` or `txt` + # @param [String] filename_label Optional custom string that will be injected into the file name, e.g. 
`worker_0` + # @return [String] Full path to the resulting dump file + def dump_stats(path:, format: STATS_DEFAULT_FORMAT, filename_label: nil) verify_format!(format) + format_settings = STATS_FORMATS[format] + file_path = File.join(path, file_name(format_settings[:extension], filename_label)) + with_malloc_stats_print do |stats_print| - format_settings = STATS_FORMATS[format] - File.open(File.join(path, file_name(format_settings[:extension])), 'wb') do |io| + File.open(file_path, 'wb') do |io| write_stats(stats_print, io, format_settings) end end + + file_path end private @@ -80,8 +90,8 @@ module Gitlab stats_print.call(callback, nil, format[:options]) end - def file_name(extension) - "jemalloc_stats.#{$$}.#{Time.current.to_i}.#{extension}" + def file_name(extension, filename_label) + [FILENAME_PREFIX, $$, filename_label, Time.current.to_i, extension].reject(&:blank?).join('.') end end end diff --git a/lib/gitlab/memory/reports/jemalloc_stats.rb b/lib/gitlab/memory/reports/jemalloc_stats.rb new file mode 100644 index 00000000000..b99bec4ac3e --- /dev/null +++ b/lib/gitlab/memory/reports/jemalloc_stats.rb @@ -0,0 +1,67 @@ +# frozen_string_literal: true + +module Gitlab + module Memory + module Reports + class JemallocStats + # On prod, Jemalloc reports sizes were ~2.5 MB: + # https://gitlab.com/gitlab-com/gl-infra/reliability/-/issues/15993#note_1014767214 + # We configured 1GB emptyDir per pod: + # https://gitlab.com/gitlab-com/gl-infra/k8s-workloads/gitlab-com/-/merge_requests/1949 + # The pod will be evicted when the size limit is exceeded. We never want this to happen, for availability. + # + # With the default, we have a headroom (250*2.5MB=625<1000 MB) to fit into configured emptyDir. + # It would allow us to keep 3+ days worth of reports for 6 workers running every 2 hours: 3*6*12=216<250 + # + # The cleanup logic will be redundant after we'll implement the uploads, which would perform the cleanup. 
+ DEFAULT_MAX_REPORTS_STORED = 250 + + def initialize(reports_path:) + @reports_path = reports_path + end + + def run + return unless active? + + Gitlab::Memory::Jemalloc.dump_stats(path: reports_path, filename_label: worker_id).tap { cleanup } + end + + def active? + Feature.enabled?(:report_jemalloc_stats, type: :ops) + end + + private + + attr_reader :reports_path + + def cleanup + reports_files_modified_order[0...-max_reports_stored].each do |f| + File.unlink(f) if File.exist?(f) + rescue Errno::ENOENT + # Path does not exist: Ignore. We already check `File.exist?` + # Rescue to be extra safe, because each worker could perform a cleanup + end + end + + def reports_files_modified_order + pattern = File.join(reports_path, "#{Gitlab::Memory::Jemalloc::FILENAME_PREFIX}*") + + Dir.glob(pattern).sort_by do |f| + test('M', f) + rescue Errno::ENOENT + # Path does not exist: Return any timestamp to proceed with the sort + Time.current + end + end + + def worker_id + ::Prometheus::PidProvider.worker_id + end + + def max_reports_stored + ENV["GITLAB_DIAGNOSTIC_REPORTS_JEMALLOC_MAX_REPORTS_STORED"] || DEFAULT_MAX_REPORTS_STORED + end + end + end + end +end diff --git a/lib/gitlab/memory/reports_daemon.rb b/lib/gitlab/memory/reports_daemon.rb new file mode 100644 index 00000000000..ed1da8baab5 --- /dev/null +++ b/lib/gitlab/memory/reports_daemon.rb @@ -0,0 +1,106 @@ +# frozen_string_literal: true + +module Gitlab + module Memory + class ReportsDaemon < Daemon + DEFAULT_SLEEP_S = 7200 # 2 hours + DEFAULT_SLEEP_MAX_DELTA_S = 600 # 0..10 minutes + DEFAULT_SLEEP_BETWEEN_REPORTS_S = 120 # 2 minutes + + DEFAULT_REPORTS_PATH = '/tmp' + + def initialize(**options) + super + + @alive = true + + @sleep_s = + ENV['GITLAB_DIAGNOSTIC_REPORTS_SLEEP_S']&.to_i || DEFAULT_SLEEP_S + @sleep_max_delta_s = + ENV['GITLAB_DIAGNOSTIC_REPORTS_SLEEP_MAX_DELTA_S']&.to_i || DEFAULT_SLEEP_MAX_DELTA_S + @sleep_between_reports_s = + ENV['GITLAB_DIAGNOSTIC_REPORTS_SLEEP_BETWEEN_REPORTS_S']&.to_i || 
DEFAULT_SLEEP_BETWEEN_REPORTS_S + + @reports_path = + ENV["GITLAB_DIAGNOSTIC_REPORTS_PATH"] || DEFAULT_REPORTS_PATH + + @reports = [Gitlab::Memory::Reports::JemallocStats.new(reports_path: reports_path)] + + init_prometheus_metrics + end + + attr_reader :sleep_s, :sleep_max_delta_s, :sleep_between_reports_s, :reports_path + + def run_thread + while alive + sleep interval_with_jitter + + reports.select(&:active?).each do |report| + start_monotonic_time = Gitlab::Metrics::System.monotonic_time + start_thread_cpu_time = Gitlab::Metrics::System.thread_cpu_time + + file_path = report.run + + cpu_s = Gitlab::Metrics::System.thread_cpu_duration(start_thread_cpu_time) + duration_s = Gitlab::Metrics::System.monotonic_time - start_monotonic_time + + log_report(label: report_label(report), cpu_s: cpu_s, duration_s: duration_s, size: file_size(file_path)) + @report_duration_counter.increment({ report: report_label(report) }, duration_s) + + sleep sleep_between_reports_s + end + end + end + + private + + attr_reader :alive, :reports + + # Returns the sleep interval with a random adjustment. + # The random adjustment is put in place to ensure continued availability. 
+ def interval_with_jitter + sleep_s + rand(sleep_max_delta_s) + end + + def log_report(label:, duration_s:, cpu_s:, size:) + Gitlab::AppLogger.info( + message: 'finished', + pid: $$, + worker_id: worker_id, + perf_report: label, + duration_s: duration_s.round(2), + cpu_s: cpu_s.round(2), + perf_report_size_bytes: size + ) + end + + def worker_id + ::Prometheus::PidProvider.worker_id + end + + def report_label(report) + report.class.to_s.demodulize.underscore + end + + def stop_working + @alive = false + end + + def init_prometheus_metrics + default_labels = { pid: worker_id } + + @report_duration_counter = Gitlab::Metrics.counter( + :gitlab_diag_report_duration_seconds_total, + 'Total time elapsed for running diagnostic report', + default_labels + ) + end + + def file_size(file_path) + File.size(file_path.to_s) + rescue Errno::ENOENT + 0 + end + end + end +end diff --git a/lib/gitlab/memory/watchdog.rb b/lib/gitlab/memory/watchdog.rb index db75ba8a47d..91edb68ad66 100644 --- a/lib/gitlab/memory/watchdog.rb +++ b/lib/gitlab/memory/watchdog.rb @@ -15,7 +15,7 @@ module Gitlab # # The duration for which a process may be above a given fragmentation # threshold is computed as `max_strikes * sleep_time_seconds`. 
- class Watchdog < Daemon + class Watchdog DEFAULT_SLEEP_TIME_SECONDS = 60 DEFAULT_HEAP_FRAG_THRESHOLD = 0.5 DEFAULT_MAX_STRIKES = 5 @@ -91,7 +91,7 @@ module Gitlab attr_reader :strikes, :max_heap_fragmentation, :max_strikes, :sleep_time_seconds - def run_thread + def call @logger.info(log_labels.merge(message: 'started')) while @alive @@ -103,6 +103,10 @@ module Gitlab @logger.info(log_labels.merge(message: 'stopped')) end + def stop + @alive = false + end + private def monitor_heap_fragmentation @@ -141,10 +145,6 @@ module Gitlab @handler end - def stop_working - @alive = false - end - def log_labels { pid: $$, @@ -167,15 +167,13 @@ module Gitlab end def init_prometheus_metrics(max_heap_fragmentation) - default_labels = { pid: worker_id } - @heap_frag_limit = Gitlab::Metrics.gauge( :gitlab_memwd_heap_frag_limit, - 'The configured limit for how fragmented the Ruby heap is allowed to be', - default_labels + 'The configured limit for how fragmented the Ruby heap is allowed to be' ) @heap_frag_limit.set({}, max_heap_fragmentation) + default_labels = { pid: worker_id } @heap_frag_violations = Gitlab::Metrics.counter( :gitlab_memwd_heap_frag_violations_total, 'Total number of times heap fragmentation in a Ruby process exceeded its allowed maximum', diff --git a/lib/gitlab/merge_requests/mergeability/check_result.rb b/lib/gitlab/merge_requests/mergeability/check_result.rb index 5284d20d423..a25156661af 100644 --- a/lib/gitlab/merge_requests/mergeability/check_result.rb +++ b/lib/gitlab/merge_requests/mergeability/check_result.rb @@ -13,11 +13,11 @@ module Gitlab end def self.success(payload: {}) - new(status: SUCCESS_STATUS, payload: default_payload.merge(payload)) + new(status: SUCCESS_STATUS, payload: default_payload.merge(**payload)) end def self.failed(payload: {}) - new(status: FAILED_STATUS, payload: default_payload.merge(payload)) + new(status: FAILED_STATUS, payload: default_payload.merge(**payload)) end def self.from_hash(data) diff --git 
a/lib/gitlab/metrics/dashboard/url.rb b/lib/gitlab/metrics/dashboard/url.rb index 6dcc73c0f6a..d4f779ad79d 100644 --- a/lib/gitlab/metrics/dashboard/url.rb +++ b/lib/gitlab/metrics/dashboard/url.rb @@ -42,7 +42,7 @@ module Gitlab #{DASH_PATTERN}? /grafana /metrics_dashboard - }x + }xo ) end end @@ -64,7 +64,7 @@ module Gitlab /(?<cluster_id>\d+) /? ( (/metrics) | ( /metrics_dashboard\.json ) )? - }x + }xo ) end end @@ -82,7 +82,7 @@ module Gitlab /alerts /(?<alert>\d+) /metrics_dashboard(\.json)? - }x + }xo ) end end @@ -112,7 +112,7 @@ module Gitlab /environments /(?<environment>\d+) /(metrics_dashboard|metrics) - }x + }xo end def non_environment_metrics_regex @@ -125,7 +125,7 @@ module Gitlab environment=(?<environment>\d+) .* ) - }x + }xo end def regex_for_project_metrics(path_suffix_pattern) diff --git a/lib/gitlab/metrics/methods/metric_options.rb b/lib/gitlab/metrics/methods/metric_options.rb index 1e488df3e99..e93a90415c7 100644 --- a/lib/gitlab/metrics/methods/metric_options.rb +++ b/lib/gitlab/metrics/methods/metric_options.rb @@ -61,7 +61,7 @@ module Gitlab end def evaluate(&block) - instance_eval(&block) if block_given? 
+ instance_eval(&block) if block self end diff --git a/lib/gitlab/middleware/read_only/controller.rb b/lib/gitlab/middleware/read_only/controller.rb index 65c08664a2b..69e2ae55cb0 100644 --- a/lib/gitlab/middleware/read_only/controller.rb +++ b/lib/gitlab/middleware/read_only/controller.rb @@ -83,7 +83,11 @@ module Gitlab end def route_hash - @route_hash ||= Rails.application.routes.recognize_path(request_url, { method: request.request_method }) rescue {} + @route_hash ||= begin + Rails.application.routes.recognize_path(request_url, { method: request.request_method }) + rescue StandardError + {} + end end def request_url diff --git a/lib/gitlab/null_request_store.rb b/lib/gitlab/null_request_store.rb index 8db331dcb9f..4642dcf9e91 100644 --- a/lib/gitlab/null_request_store.rb +++ b/lib/gitlab/null_request_store.rb @@ -35,7 +35,7 @@ module Gitlab end def delete(key, &block) - yield(key) if block_given? + yield(key) if block end end end diff --git a/lib/gitlab/object_hierarchy.rb b/lib/gitlab/object_hierarchy.rb index 9a850246221..0576aed811c 100644 --- a/lib/gitlab/object_hierarchy.rb +++ b/lib/gitlab/object_hierarchy.rb @@ -65,7 +65,8 @@ module Gitlab # Note: By default the order is breadth-first # rubocop: disable CodeReuse/ActiveRecord def base_and_ancestors(upto: nil, hierarchy_order: nil) - cte = base_and_ancestors_cte(upto, hierarchy_order) + upto_id = upto.try(:id) || upto + cte = base_and_ancestors_cte(upto_id, hierarchy_order) recursive_query = if hierarchy_order # othewise depth won't be available for outer query diff --git a/lib/gitlab/pagination/gitaly_keyset_pager.rb b/lib/gitlab/pagination/gitaly_keyset_pager.rb index 8bbc9a93610..1f1061fe4f1 100644 --- a/lib/gitlab/pagination/gitaly_keyset_pager.rb +++ b/lib/gitlab/pagination/gitaly_keyset_pager.rb @@ -12,9 +12,11 @@ module Gitlab @project = project end - # It is expected that the given finder will respond to `execute` method with `gitaly_pagination: true` option + # It is expected that the given 
finder will respond to `execute` method with `gitaly_pagination:` option # and supports pagination via gitaly. def paginate(finder) + return finder.execute(gitaly_pagination: false) if no_pagination? + return paginate_via_gitaly(finder) if keyset_pagination_enabled?(finder) return paginate_first_page_via_gitaly(finder) if paginate_first_page?(finder) @@ -26,6 +28,10 @@ module Gitlab private + def no_pagination? + params[:pagination] == 'none' + end + def keyset_pagination_enabled?(finder) return false unless params[:pagination] == "keyset" diff --git a/lib/gitlab/patch/global_id.rb b/lib/gitlab/patch/global_id.rb index 145a7bfe842..9b093186aec 100644 --- a/lib/gitlab/patch/global_id.rb +++ b/lib/gitlab/patch/global_id.rb @@ -9,7 +9,7 @@ module Gitlab super if deprecation = Gitlab::GlobalId::Deprecations.deprecation_for(model_name) - @new_model_name = deprecation.new_model_name + @new_model_name = deprecation.new_name end end diff --git a/lib/gitlab/path_regex.rb b/lib/gitlab/path_regex.rb index b05d7160a4b..b0804c2ff66 100644 --- a/lib/gitlab/path_regex.rb +++ b/lib/gitlab/path_regex.rb @@ -204,7 +204,7 @@ module Gitlab end def namespace_format_regex - @namespace_format_regex ||= /\A#{NAMESPACE_FORMAT_REGEX}\z/.freeze + @namespace_format_regex ||= /\A#{NAMESPACE_FORMAT_REGEX}\z/o.freeze end def namespace_format_message @@ -213,7 +213,7 @@ module Gitlab end def project_path_format_regex - @project_path_format_regex ||= /\A#{PROJECT_PATH_FORMAT_REGEX}\z/.freeze + @project_path_format_regex ||= /\A#{PROJECT_PATH_FORMAT_REGEX}\z/o.freeze end def project_path_format_message diff --git a/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled.rb b/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled.rb index ac5c907465e..fbc77113875 100644 --- a/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled.rb +++ b/lib/gitlab/performance_bar/redis_adapter_when_peek_enabled.rb @@ -17,7 +17,7 @@ module Gitlab # to a structured log # rubocop:disable 
Gitlab/ModuleWithInstanceVariables def enqueue_stats_job(request_id) - return unless Feature.enabled?(:performance_bar_stats) + return unless Feature.enabled?(:performance_bar_stats, type: :ops) @client.sadd(GitlabPerformanceBarStatsWorker::STATS_KEY, request_id) diff --git a/lib/gitlab/process_management.rb b/lib/gitlab/process_management.rb index 25a198e4a6a..f8a1a3a97de 100644 --- a/lib/gitlab/process_management.rb +++ b/lib/gitlab/process_management.rb @@ -43,7 +43,9 @@ module Gitlab # Waits for the given process to complete using a separate thread. def self.wait_async(pid) Thread.new do - Process.wait(pid) rescue Errno::ECHILD + Process.wait(pid) + rescue StandardError + nil # There is no reason to return `Errno::ECHILD` if it catches a `TypeError` end end diff --git a/lib/gitlab/profiler.rb b/lib/gitlab/profiler.rb index d15b57eb888..fd9f73d18c1 100644 --- a/lib/gitlab/profiler.rb +++ b/lib/gitlab/profiler.rb @@ -146,7 +146,7 @@ module Gitlab if user API::Helpers::CommonHelpers.send(:define_method, :find_current_user!) { user } # rubocop:disable GitlabSecurity/PublicSend ApplicationController.send(:define_method, :current_user) { user } # rubocop:disable GitlabSecurity/PublicSend - ApplicationController.send(:define_method, :authenticate_user!) { } # rubocop:disable GitlabSecurity/PublicSend + ApplicationController.send(:define_method, :authenticate_user!) 
{} # rubocop:disable GitlabSecurity/PublicSend end yield.tap do diff --git a/lib/gitlab/project_template.rb b/lib/gitlab/project_template.rb index 0ab6055408f..6673940ccf3 100644 --- a/lib/gitlab/project_template.rb +++ b/lib/gitlab/project_template.rb @@ -49,7 +49,7 @@ module Gitlab ProjectTemplate.new('spring', 'Spring', _('Includes an MVC structure, mvnw and pom.xml to help you get started'), 'https://gitlab.com/gitlab-org/project-templates/spring', 'illustrations/logos/spring.svg'), ProjectTemplate.new('express', 'NodeJS Express', _('Includes an MVC structure to help you get started'), 'https://gitlab.com/gitlab-org/project-templates/express', 'illustrations/logos/express.svg'), ProjectTemplate.new('iosswift', 'iOS (Swift)', _('A ready-to-go template for use with iOS Swift apps'), 'https://gitlab.com/gitlab-org/project-templates/iosswift', 'illustrations/logos/swift.svg'), - ProjectTemplate.new('dotnetcore', '.NET Core', _('A .NET Core console application template, customizable for any .NET Core project'), 'https://gitlab.com/gitlab-org/project-templates/dotnetcore', 'illustrations/logos/dotnet.svg'), + ProjectTemplate.new('dotnetcore', '.NET Core', _('A .NET Core console application template, customizable for any .NET Core project'), 'https://gitlab.com/gitlab-org/project-templates/dotnetcore', 'illustrations/third-party-logos/dotnet.svg'), ProjectTemplate.new('android', 'Android', _('A ready-to-go template for use with Android apps'), 'https://gitlab.com/gitlab-org/project-templates/android', 'illustrations/logos/android.svg'), ProjectTemplate.new('gomicro', 'Go Micro', _('Go Micro is a framework for micro service development'), 'https://gitlab.com/gitlab-org/project-templates/go-micro', 'illustrations/logos/gomicro.svg'), ProjectTemplate.new('gatsby', 'Pages/Gatsby', _('Everything you need to create a GitLab Pages site using Gatsby'), 'https://gitlab.com/pages/gatsby', 'illustrations/third-party-logos/gatsby.svg'), diff --git 
a/lib/gitlab/query_limiting/middleware.rb b/lib/gitlab/query_limiting/middleware.rb index 76de547b14f..0ee700caf5b 100644 --- a/lib/gitlab/query_limiting/middleware.rb +++ b/lib/gitlab/query_limiting/middleware.rb @@ -46,7 +46,11 @@ module Gitlab def action_for_grape(env) endpoint = env[ENDPOINT_KEY] - route = endpoint.route rescue nil + route = begin + endpoint.route + rescue StandardError + nil + end "#{route.request_method} #{route.path}" if route end diff --git a/lib/gitlab/quick_actions/command_definition.rb b/lib/gitlab/quick_actions/command_definition.rb index fcb7bc967ca..d9135d1bacb 100644 --- a/lib/gitlab/quick_actions/command_definition.rb +++ b/lib/gitlab/quick_actions/command_definition.rb @@ -89,17 +89,29 @@ module Gitlab def to_h(context) desc = description if desc.respond_to?(:call) - desc = context.instance_exec(&desc) rescue '' + desc = begin + context.instance_exec(&desc) + rescue StandardError + '' + end end warn = warning if warn.respond_to?(:call) - warn = context.instance_exec(&warn) rescue '' + warn = begin + context.instance_exec(&warn) + rescue StandardError + '' + end end prms = params if prms.respond_to?(:call) - prms = Array(context.instance_exec(&prms)) rescue params + prms = begin + Array(context.instance_exec(&prms)) + rescue StandardError + params + end end { diff --git a/lib/gitlab/quick_actions/dsl.rb b/lib/gitlab/quick_actions/dsl.rb index a2dfcc6de9a..dfbc00ef847 100644 --- a/lib/gitlab/quick_actions/dsl.rb +++ b/lib/gitlab/quick_actions/dsl.rb @@ -30,11 +30,11 @@ module Gitlab # # Awesome code block # end def desc(text = '', &block) - @description = block_given? ? block : text + @description = block || text end def warning(text = '', &block) - @warning = block_given? ? block : text + @warning = block || text end def icon(string = '') @@ -51,7 +51,7 @@ module Gitlab # # Awesome code block # end def params(*params, &block) - @params = block_given? ? 
block : params + @params = block || params end # Allows to give an explanation of what the command will do when @@ -67,7 +67,7 @@ module Gitlab # # Awesome code block # end def explanation(text = '', &block) - @explanation = block_given? ? block : text + @explanation = block || text end # Allows to provide a message about quick action execution result, success or failure. @@ -96,7 +96,7 @@ module Gitlab # end # def execution_message(text = '', &block) - @execution_message = block_given? ? block : text + @execution_message = block || text end # Allows to define type(s) that must be met in order for the command diff --git a/lib/gitlab/quick_actions/merge_request_actions.rb b/lib/gitlab/quick_actions/merge_request_actions.rb index 167e7ad67a9..3cb01db1491 100644 --- a/lib/gitlab/quick_actions/merge_request_actions.rb +++ b/lib/gitlab/quick_actions/merge_request_actions.rb @@ -292,76 +292,6 @@ module Gitlab @updates[:reviewer_ids] = [] end end - - desc do - if quick_action_target.allows_multiple_reviewers? - _('Request attention from assignee(s) or reviewer(s)') - else - _('Request attention from assignee or reviewer') - end - end - explanation do |users| - _('Request attention from %{users_sentence}.') % { users_sentence: reviewer_users_sentence(users) } - end - execution_message do |users = nil| - if users.blank? - _("Failed to request attention because no user was found.") - else - _('Requested attention from %{users_sentence}.') % { users_sentence: reviewer_users_sentence(users) } - end - end - params do - quick_action_target.allows_multiple_reviewers? ? '@user1 @user2' : '@user' - end - types MergeRequest - condition do - current_user.mr_attention_requests_enabled? && - current_user.can?(:"admin_#{quick_action_target.to_ability_name}", project) - end - parse_params do |attention_param| - extract_users(attention_param) - end - command :attention, :attn do |users| - next if users.empty? 
- - users.each do |user| - ::MergeRequests::ToggleAttentionRequestedService.new(project: quick_action_target.project, merge_request: quick_action_target, current_user: current_user, user: user).execute - end - end - - desc do - if quick_action_target.allows_multiple_reviewers? - _('Remove attention request(s)') - else - _('Remove attention request') - end - end - explanation do |users| - _('Removes attention from %{users_sentence}.') % { users_sentence: reviewer_users_sentence(users) } - end - execution_message do |users = nil| - if users.blank? - _("Failed to remove attention because no user was found.") - else - _('Removed attention from %{users_sentence}.') % { users_sentence: reviewer_users_sentence(users) } - end - end - params do - quick_action_target.allows_multiple_reviewers? ? '@user1 @user2' : '@user' - end - types MergeRequest - condition do - current_user.mr_attention_requests_enabled? && - current_user.can?(:"admin_#{quick_action_target.to_ability_name}", project) - end - parse_params do |attention_param| - extract_users(attention_param) - end - command :remove_attention do |users| - next if users.empty? - - ::MergeRequests::BulkRemoveAttentionRequestedService.new(project: quick_action_target.project, merge_request: quick_action_target, current_user: current_user, users: users).execute - end end def reviewer_users_sentence(users) diff --git a/lib/gitlab/quick_actions/spend_time_and_date_separator.rb b/lib/gitlab/quick_actions/spend_time_and_date_separator.rb index 03b2a1086bb..3794f2f8818 100644 --- a/lib/gitlab/quick_actions/spend_time_and_date_separator.rb +++ b/lib/gitlab/quick_actions/spend_time_and_date_separator.rb @@ -43,7 +43,11 @@ module Gitlab def valid_date? 
string_date = @spend_arg.match(DATE_REGEX)[0] - date = Date.parse(string_date) rescue nil + date = begin + Date.parse(string_date) + rescue StandardError + nil + end date_past_or_today?(date) end diff --git a/lib/gitlab/redis/cache.rb b/lib/gitlab/redis/cache.rb index a2c7b5e29db..4ab1024d528 100644 --- a/lib/gitlab/redis/cache.rb +++ b/lib/gitlab/redis/cache.rb @@ -12,7 +12,7 @@ module Gitlab redis: pool, compress: Gitlab::Utils.to_boolean(ENV.fetch('ENABLE_REDIS_CACHE_COMPRESSION', '1')), namespace: CACHE_NAMESPACE, - expires_in: 2.weeks # Cache should not grow forever + expires_in: ENV.fetch('GITLAB_RAILS_CACHE_DEFAULT_TTL_SECONDS', 2.weeks).to_i # Cache should not grow forever } end end diff --git a/lib/gitlab/redis/multi_store.rb b/lib/gitlab/redis/multi_store.rb index 94f06e957cf..cdd2ac6100e 100644 --- a/lib/gitlab/redis/multi_store.rb +++ b/lib/gitlab/redis/multi_store.rb @@ -274,7 +274,7 @@ module Gitlab # rubocop:disable GitlabSecurity/PublicSend def send_command(redis_instance, command_name, *args, **kwargs, &block) - if block_given? 
+ if block # Make sure that block is wrapped and executed only on the redis instance that is executing the block redis_instance.send(command_name, *args, **kwargs) do |*params| with_instance(redis_instance, *params, &block) diff --git a/lib/gitlab/regex.rb b/lib/gitlab/regex.rb index 551750f9798..10c03103899 100644 --- a/lib/gitlab/regex.rb +++ b/lib/gitlab/regex.rb @@ -70,7 +70,7 @@ module Gitlab end def npm_package_name_regex - @npm_package_name_regex ||= %r{\A(?:@(#{Gitlab::PathRegex::NAMESPACE_FORMAT_REGEX})/)?[-+\.\_a-zA-Z0-9]+\z} + @npm_package_name_regex ||= %r{\A(?:@(#{Gitlab::PathRegex::NAMESPACE_FORMAT_REGEX})/)?[-+\.\_a-zA-Z0-9]+\z}o end def nuget_package_name_regex @@ -128,15 +128,15 @@ module Gitlab def debian_architecture_regex # See official parser: https://git.dpkg.org/cgit/dpkg/dpkg.git/tree/lib/dpkg/arch.c?id=9e0c88ec09475f4d1addde9cdba1ad7849720356#n43 # But we limit to lower case - @debian_architecture_regex ||= %r{\A#{::Packages::Debian::ARCHITECTURE_REGEX}\z}.freeze + @debian_architecture_regex ||= %r{\A#{::Packages::Debian::ARCHITECTURE_REGEX}\z}o.freeze end def debian_distribution_regex - @debian_distribution_regex ||= %r{\A#{::Packages::Debian::DISTRIBUTION_REGEX}\z}i.freeze + @debian_distribution_regex ||= %r{\A#{::Packages::Debian::DISTRIBUTION_REGEX}\z}io.freeze end def debian_component_regex - @debian_component_regex ||= %r{\A#{::Packages::Debian::COMPONENT_REGEX}\z}.freeze + @debian_component_regex ||= %r{\A#{::Packages::Debian::COMPONENT_REGEX}\z}o.freeze end def helm_channel_regex diff --git a/lib/gitlab/relative_positioning/item_context.rb b/lib/gitlab/relative_positioning/item_context.rb index ac0598d8d34..a9966c0f3fb 100644 --- a/lib/gitlab/relative_positioning/item_context.rb +++ b/lib/gitlab/relative_positioning/item_context.rb @@ -91,8 +91,7 @@ module Gitlab relation = yield relation if block_given? 
relation - .pluck(grouping_column, Arel.sql("#{calculation}(relative_position) AS position")) - .first&.last + .pick(grouping_column, Arel.sql("#{calculation}(relative_position) AS position"))&.last end def grouping_column @@ -163,9 +162,7 @@ module Gitlab gap = model_class .from(items_with_next_pos, :items) .where('next_pos IS NULL OR ABS(pos::bigint - next_pos::bigint) >= ?', MIN_GAP) - .limit(1) - .pluck(:pos, :next_pos) - .first + .pick(:pos, :next_pos) return if gap.nil? || gap.first == default_end diff --git a/lib/gitlab/saas.rb b/lib/gitlab/saas.rb index 4683f611444..16a7a697e6a 100644 --- a/lib/gitlab/saas.rb +++ b/lib/gitlab/saas.rb @@ -52,6 +52,10 @@ module Gitlab def self.doc_url 'https://docs.gitlab.com' end + + def self.community_forum_url + 'https://forum.gitlab.com' + end end end diff --git a/lib/gitlab/safe_request_loader.rb b/lib/gitlab/safe_request_loader.rb index 89eca16c272..4fc88322210 100644 --- a/lib/gitlab/safe_request_loader.rb +++ b/lib/gitlab/safe_request_loader.rb @@ -14,7 +14,7 @@ module Gitlab end def execute(&block) - raise ArgumentError, 'Block is mandatory' unless block_given? + raise ArgumentError, 'Block is mandatory' unless block load_resource_data remove_loaded_resource_ids diff --git a/lib/gitlab/search/query.rb b/lib/gitlab/search/query.rb index 97ee7c7817d..4c5fae87420 100644 --- a/lib/gitlab/search/query.rb +++ b/lib/gitlab/search/query.rb @@ -13,7 +13,7 @@ module Gitlab @filters = [] @filter_options = { default_parser: :downcase.to_proc }.merge(filter_opts) - self.instance_eval(&block) if block_given? 
+ self.instance_eval(&block) if block @query = Gitlab::Search::ParsedQuery.new(*extract_filters) # set the ParsedQuery as our default delegator thanks to SimpleDelegator diff --git a/lib/gitlab/seeder.rb b/lib/gitlab/seeder.rb index ec514adafc8..2450ad88bbb 100644 --- a/lib/gitlab/seeder.rb +++ b/lib/gitlab/seeder.rb @@ -151,6 +151,48 @@ module Gitlab model.logger = old_loggers[connection_name] end end + + module Ci + class DailyBuildGroupReportResult + DEFAULT_BRANCH = 'master' + COUNT_OF_DAYS = 5 + + def initialize(project) + @project = project + @last_pipeline = project.last_pipeline + end + + def seed + COUNT_OF_DAYS.times do |count| + date = Time.now.utc - count.day + create_report(date) + end + end + + private + + attr_reader :project, :last_pipeline + + def create_report(date) + last_pipeline.builds.uniq(&:group_name).each do |build| + ::Ci::DailyBuildGroupReportResult.create( + project: project, + last_pipeline: last_pipeline, + date: date, + ref_path: last_pipeline.source_ref_path, + group_name: build.group_name, + data: { + 'coverage' => rand(20..99) + }, + group: project.group, + default_branch: last_pipeline.default_branch? 
+ ) + rescue ActiveRecord::RecordNotUnique + return false + end + end + end + end end end # :nocov: diff --git a/lib/gitlab/sidekiq_daemon/memory_killer.rb b/lib/gitlab/sidekiq_daemon/memory_killer.rb index cb7d9c6f8a7..ca92fed9c40 100644 --- a/lib/gitlab/sidekiq_daemon/memory_killer.rb +++ b/lib/gitlab/sidekiq_daemon/memory_killer.rb @@ -44,7 +44,8 @@ module Gitlab sidekiq_current_rss: ::Gitlab::Metrics.gauge(:sidekiq_current_rss, 'Current RSS of Sidekiq Worker'), sidekiq_memory_killer_soft_limit_rss: ::Gitlab::Metrics.gauge(:sidekiq_memory_killer_soft_limit_rss, 'Current soft_limit_rss of Sidekiq Worker'), sidekiq_memory_killer_hard_limit_rss: ::Gitlab::Metrics.gauge(:sidekiq_memory_killer_hard_limit_rss, 'Current hard_limit_rss of Sidekiq Worker'), - sidekiq_memory_killer_phase: ::Gitlab::Metrics.gauge(:sidekiq_memory_killer_phase, 'Current phase of Sidekiq Worker') + sidekiq_memory_killer_phase: ::Gitlab::Metrics.gauge(:sidekiq_memory_killer_phase, 'Current phase of Sidekiq Worker'), + sidekiq_memory_killer_running_jobs: ::Gitlab::Metrics.counter(:sidekiq_memory_killer_running_jobs_total, 'Current running jobs when limit was reached') } end @@ -166,6 +167,8 @@ module Gitlab @soft_limit_rss, deadline_exceeded) + running_jobs = fetch_running_jobs + Sidekiq.logger.warn( class: self.class.to_s, pid: pid, @@ -175,9 +178,17 @@ module Gitlab hard_limit_rss: @hard_limit_rss, reason: reason, running_jobs: running_jobs) + + increment_worker_counters(running_jobs, deadline_exceeded) + end + + def increment_worker_counters(running_jobs, deadline_exceeded) + running_jobs.each do |job| + @metrics[:sidekiq_memory_killer_running_jobs].increment( { worker_class: job[:worker_class], deadline_exceeded: deadline_exceeded } ) + end end - def running_jobs + def fetch_running_jobs jobs = [] Gitlab::SidekiqDaemon::Monitor.instance.jobs_mutex.synchronize do jobs = Gitlab::SidekiqDaemon::Monitor.instance.jobs.map do |jid, job| diff --git a/lib/gitlab/sidekiq_logging/logs_jobs.rb 
b/lib/gitlab/sidekiq_logging/logs_jobs.rb index de08de6632b..3e6e6e05e95 100644 --- a/lib/gitlab/sidekiq_logging/logs_jobs.rb +++ b/lib/gitlab/sidekiq_logging/logs_jobs.rb @@ -12,7 +12,6 @@ module Gitlab # Error information from the previous try is in the payload for # displaying in the Sidekiq UI, but is very confusing in logs! job = job.except( - 'error_backtrace', 'error_class', 'error_message', 'exception.backtrace', 'exception.class', 'exception.message', 'exception.sql' ) diff --git a/lib/gitlab/sidekiq_middleware/server_metrics.rb b/lib/gitlab/sidekiq_middleware/server_metrics.rb index ea2b405c934..180cdad916b 100644 --- a/lib/gitlab/sidekiq_middleware/server_metrics.rb +++ b/lib/gitlab/sidekiq_middleware/server_metrics.rb @@ -35,7 +35,8 @@ module Gitlab sidekiq_redis_requests_total: ::Gitlab::Metrics.counter(:sidekiq_redis_requests_total, 'Redis requests during a Sidekiq job execution'), sidekiq_elasticsearch_requests_total: ::Gitlab::Metrics.counter(:sidekiq_elasticsearch_requests_total, 'Elasticsearch requests during a Sidekiq job execution'), sidekiq_running_jobs: ::Gitlab::Metrics.gauge(:sidekiq_running_jobs, 'Number of Sidekiq jobs running', {}, :all), - sidekiq_concurrency: ::Gitlab::Metrics.gauge(:sidekiq_concurrency, 'Maximum number of Sidekiq jobs', {}, :all) + sidekiq_concurrency: ::Gitlab::Metrics.gauge(:sidekiq_concurrency, 'Maximum number of Sidekiq jobs', {}, :all), + sidekiq_mem_total_bytes: ::Gitlab::Metrics.gauge(:sidekiq_mem_total_bytes, 'Number of bytes allocated for both objects consuming an object slot and objects that required a malloc', {}, :all) } end @@ -123,6 +124,7 @@ module Gitlab @metrics[:sidekiq_redis_requests_duration_seconds].observe(labels, get_redis_time(instrumentation)) @metrics[:sidekiq_elasticsearch_requests_total].increment(labels, get_elasticsearch_calls(instrumentation)) @metrics[:sidekiq_elasticsearch_requests_duration_seconds].observe(labels, get_elasticsearch_time(instrumentation)) + 
@metrics[:sidekiq_mem_total_bytes].set(labels, get_thread_memory_total_allocations(instrumentation)) with_load_balancing_settings(job) do |settings| load_balancing_labels = { @@ -160,6 +162,10 @@ module Gitlab payload.fetch(:elasticsearch_duration_s, 0) end + def get_thread_memory_total_allocations(payload) + payload.fetch(:mem_total_bytes, 0) + end + def get_elasticsearch_calls(payload) payload.fetch(:elasticsearch_calls, 0) end diff --git a/lib/gitlab/ssh/commit.rb b/lib/gitlab/ssh/commit.rb new file mode 100644 index 00000000000..bfeefc47f13 --- /dev/null +++ b/lib/gitlab/ssh/commit.rb @@ -0,0 +1,24 @@ +# frozen_string_literal: true + +module Gitlab + module Ssh + class Commit < Gitlab::SignedCommit + private + + def signature_class + CommitSignatures::SshSignature + end + + def attributes + signature = ::Gitlab::Ssh::Signature.new(signature_text, signed_text, @commit.committer_email) + + { + commit_sha: @commit.sha, + project: @commit.project, + key_id: signature.signed_by_key&.id, + verification_status: signature.verification_status + } + end + end + end +end diff --git a/lib/gitlab/ssh/signature.rb b/lib/gitlab/ssh/signature.rb index 1a236e1a70c..3b4df9a8d0c 100644 --- a/lib/gitlab/ssh/signature.rb +++ b/lib/gitlab/ssh/signature.rb @@ -26,6 +26,14 @@ module Gitlab end end + def signed_by_key + strong_memoize(:signed_by_key) do + next unless key_fingerprint + + Key.find_by_fingerprint_sha256(key_fingerprint) + end + end + private def all_attributes_present? 
@@ -61,14 +69,6 @@ module Gitlab def key_fingerprint strong_memoize(:key_fingerprint) { signature&.public_key&.fingerprint } end - - def signed_by_key - strong_memoize(:signed_by_key) do - next unless key_fingerprint - - Key.find_by_fingerprint_sha256(key_fingerprint) - end - end end end end diff --git a/lib/gitlab/string_placeholder_replacer.rb b/lib/gitlab/string_placeholder_replacer.rb index 62621255a53..f77bd8e2d9f 100644 --- a/lib/gitlab/string_placeholder_replacer.rb +++ b/lib/gitlab/string_placeholder_replacer.rb @@ -10,7 +10,7 @@ module Gitlab # placeholder will be returned. def self.replace_string_placeholders(string, placeholder_regex = nil, &block) - return string if string.blank? || placeholder_regex.blank? || !block_given? + return string if string.blank? || placeholder_regex.blank? || !block replace_placeholders(string, placeholder_regex, &block) end diff --git a/lib/gitlab/terraform/state_migration_helper.rb b/lib/gitlab/terraform/state_migration_helper.rb index 04c1cbd0373..e86144f91a5 100644 --- a/lib/gitlab/terraform/state_migration_helper.rb +++ b/lib/gitlab/terraform/state_migration_helper.rb @@ -22,7 +22,7 @@ module Gitlab versions.find_each(batch_size: batch_size) do |version| # rubocop:disable CodeReuse/ActiveRecord version.file.migrate!(store) - yield version if block_given? + yield version if block end end end diff --git a/lib/gitlab/tracking.rb b/lib/gitlab/tracking.rb index 04745bafe7c..3b46b4c5498 100644 --- a/lib/gitlab/tracking.rb +++ b/lib/gitlab/tracking.rb @@ -41,7 +41,7 @@ module Gitlab def snowplow_micro_enabled? Rails.env.development? 
&& Gitlab.config.snowplow_micro.enabled rescue Settingslogic::MissingSetting - Gitlab::Utils.to_boolean(ENV['SNOWPLOW_MICRO_ENABLE']) + false end private diff --git a/lib/gitlab/tracking/destinations/snowplow_micro.rb b/lib/gitlab/tracking/destinations/snowplow_micro.rb index c7a95e88d0b..09480f26106 100644 --- a/lib/gitlab/tracking/destinations/snowplow_micro.rb +++ b/lib/gitlab/tracking/destinations/snowplow_micro.rb @@ -54,7 +54,7 @@ module Gitlab scheme = Gitlab.config.gitlab.https ? 'https' : 'http' "#{scheme}://#{url}" rescue Settingslogic::MissingSetting - ENV['SNOWPLOW_MICRO_URI'] || DEFAULT_URI + DEFAULT_URI end end end diff --git a/lib/gitlab/usage/metrics/instrumentations/base_metric.rb b/lib/gitlab/usage/metrics/instrumentations/base_metric.rb index f76ed1753b2..5e20766b1b4 100644 --- a/lib/gitlab/usage/metrics/instrumentations/base_metric.rb +++ b/lib/gitlab/usage/metrics/instrumentations/base_metric.rb @@ -13,7 +13,7 @@ module Gitlab class << self def available?(&block) - return @metric_available = block if block_given? + return @metric_available = block if block return @metric_available.call if instance_variable_defined?('@metric_available') diff --git a/lib/gitlab/usage/metrics/instrumentations/database_metric.rb b/lib/gitlab/usage/metrics/instrumentations/database_metric.rb index 3b09100f3ff..6dec0349a38 100644 --- a/lib/gitlab/usage/metrics/instrumentations/database_metric.rb +++ b/lib/gitlab/usage/metrics/instrumentations/database_metric.rb @@ -23,42 +23,48 @@ module Gitlab private_constant :IMPLEMENTED_OPERATIONS def start(&block) - return @metric_start&.call unless block_given? + return @metric_start&.call unless block @metric_start = block end def finish(&block) - return @metric_finish&.call unless block_given? + return @metric_finish&.call unless block @metric_finish = block end def relation(&block) - return @metric_relation&.call unless block_given? 
+ return @metric_relation&.call unless block @metric_relation = block end def metric_options(&block) - return @metric_options&.call.to_h unless block_given? + return @metric_options&.call.to_h unless block @metric_options = block end + def timestamp_column(symbol) + @metric_timestamp_column = symbol + end + def operation(symbol, column: nil, &block) raise UnimplementedOperationError unless symbol.in?(IMPLEMENTED_OPERATIONS) @metric_operation = symbol @column = column - @metric_operation_block = block if block_given? + @metric_operation_block = block if block end def cache_start_and_finish_as(cache_key) @cache_key = cache_key end - attr_reader :metric_operation, :metric_relation, :metric_start, :metric_finish, :metric_operation_block, :column, :cache_key + attr_reader :metric_operation, :metric_relation, :metric_start, + :metric_finish, :metric_operation_block, + :column, :cache_key, :metric_timestamp_column end def value @@ -106,7 +112,7 @@ module Gitlab def time_constraints case time_frame when '28d' - monthly_time_range_db_params + monthly_time_range_db_params(column: self.class.metric_timestamp_column) when 'all' {} when 'none' diff --git a/lib/gitlab/usage/metrics/instrumentations/numbers_metric.rb b/lib/gitlab/usage/metrics/instrumentations/numbers_metric.rb index 8504ee368fc..3b20e6ad100 100644 --- a/lib/gitlab/usage/metrics/instrumentations/numbers_metric.rb +++ b/lib/gitlab/usage/metrics/instrumentations/numbers_metric.rb @@ -26,7 +26,7 @@ module Gitlab private_constant :IMPLEMENTED_OPERATIONS def data(&block) - return @metric_data&.call unless block_given? 
+ return @metric_data&.call unless block @metric_data = block end diff --git a/lib/gitlab/usage/time_frame.rb b/lib/gitlab/usage/time_frame.rb index 966a087ee07..39b0855b917 100644 --- a/lib/gitlab/usage/time_frame.rb +++ b/lib/gitlab/usage/time_frame.rb @@ -6,6 +6,7 @@ module Gitlab ALL_TIME_TIME_FRAME_NAME = "all" SEVEN_DAYS_TIME_FRAME_NAME = "7d" TWENTY_EIGHT_DAYS_TIME_FRAME_NAME = "28d" + DEFAULT_TIMESTAMP_COLUMN = :created_at def weekly_time_range { start_date: 7.days.ago.to_date, end_date: Date.current } @@ -17,8 +18,8 @@ module Gitlab # This time range is skewed for batch counter performance. # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/42972 - def monthly_time_range_db_params(column: :created_at) - { column => 30.days.ago..2.days.ago } + def monthly_time_range_db_params(column: nil) + { (column || DEFAULT_TIMESTAMP_COLUMN) => 30.days.ago..2.days.ago } end end end diff --git a/lib/gitlab/usage_data_counters.rb b/lib/gitlab/usage_data_counters.rb index 2a3dcf267c6..224897ed758 100644 --- a/lib/gitlab/usage_data_counters.rb +++ b/lib/gitlab/usage_data_counters.rb @@ -16,7 +16,8 @@ module Gitlab DesignsCounter, KubernetesAgentCounter, DiffsCounter, - ServiceUsageDataCounter + ServiceUsageDataCounter, + MergeRequestWidgetExtensionCounter ].freeze UsageDataCounterError = Class.new(StandardError) diff --git a/lib/gitlab/usage_data_counters/hll_redis_counter.rb b/lib/gitlab/usage_data_counters/hll_redis_counter.rb index 40581bda81b..a5db8ba4dcc 100644 --- a/lib/gitlab/usage_data_counters/hll_redis_counter.rb +++ b/lib/gitlab/usage_data_counters/hll_redis_counter.rb @@ -32,7 +32,6 @@ module Gitlab issues_edit pipeline_authoring quickactions - search user_packages ].freeze @@ -42,7 +41,9 @@ module Gitlab ide_edit importer incident_management_alerts + kubernetes_agent pipeline_authoring + search secure snippets source_code @@ -114,6 +115,10 @@ module Gitlab @categories ||= known_events.map { |event| event[:category] }.uniq end + def 
categories_collected_from_metrics_definitions + CATEGORIES_COLLECTED_FROM_METRICS_DEFINITIONS + end + # @param category [String] the category name # @return [Array<String>] list of event names for given category def events_for_category(category) @@ -163,11 +168,7 @@ module Gitlab private def categories_pending_migration - if ::Feature.enabled?(:use_redis_hll_instrumentation_classes) - (categories - CATEGORIES_COLLECTED_FROM_METRICS_DEFINITIONS) - else - categories - end + (categories - categories_collected_from_metrics_definitions) end def track(values, event_name, context: '', time: Time.zone.now) diff --git a/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb b/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb index 9d463e11772..316d9bb3dc1 100644 --- a/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb +++ b/lib/gitlab/usage_data_counters/issue_activity_unique_counter.rb @@ -4,6 +4,8 @@ module Gitlab module UsageDataCounters module IssueActivityUniqueCounter ISSUE_CATEGORY = 'issues_edit' + ISSUE_ACTION = 'perform_issue_action' + ISSUE_LABEL = 'redis_hll_counters.issues_edit.issues_edit_total_unique_counts_monthly' ISSUE_ASSIGNEE_CHANGED = 'g_project_management_issue_assignee_changed' ISSUE_CREATED = 'g_project_management_issue_created' @@ -126,42 +128,48 @@ module Gitlab track_unique_action(ISSUE_TIME_SPENT_CHANGED, author) end - def track_issue_comment_added_action(author:) + def track_issue_comment_added_action(author:, project:) + track_snowplow_action(ISSUE_COMMENT_ADDED, author, project) track_unique_action(ISSUE_COMMENT_ADDED, author) end - def track_issue_comment_edited_action(author:) + def track_issue_comment_edited_action(author:, project:) + track_snowplow_action(ISSUE_COMMENT_EDITED, author, project) track_unique_action(ISSUE_COMMENT_EDITED, author) end - def track_issue_comment_removed_action(author:) + def track_issue_comment_removed_action(author:, project:) + track_snowplow_action(ISSUE_COMMENT_REMOVED, 
author, project) track_unique_action(ISSUE_COMMENT_REMOVED, author) end - def track_issue_cloned_action(author:) + def track_issue_cloned_action(author:, project:) + track_snowplow_action(ISSUE_CLONED, author, project) track_unique_action(ISSUE_CLONED, author) end private - def track_unique_action(action, author) - return unless author - - Gitlab::UsageDataCounters::HLLRedisCounter.track_event(action, values: author.id) - end - def track_snowplow_action(action, author, project) - return unless Feature.enabled?(:route_hll_to_snowplow_phase2, project&.namespace) + return unless Feature.enabled?(:route_hll_to_snowplow_phase2, project.namespace) return unless author Gitlab::Tracking.event( ISSUE_CATEGORY, - action.to_s, + ISSUE_ACTION, + label: ISSUE_LABEL, + property: action, project: project, - namespace: project&.namespace, + namespace: project.namespace, user: author ) end + + def track_unique_action(action, author) + return unless author + + Gitlab::UsageDataCounters::HLLRedisCounter.track_event(action, values: author.id) + end end end end diff --git a/lib/gitlab/usage_data_counters/known_events/ci_templates.yml b/lib/gitlab/usage_data_counters/known_events/ci_templates.yml index 3b883e505f8..a8f1bab1f20 100644 --- a/lib/gitlab/usage_data_counters/known_events/ci_templates.yml +++ b/lib/gitlab/usage_data_counters/known_events/ci_templates.yml @@ -147,6 +147,10 @@ category: ci_templates redis_slot: ci_templates aggregation: weekly +- name: p_ci_templates_security_fortify_fod_sast + category: ci_templates + redis_slot: ci_templates + aggregation: weekly - name: p_ci_templates_security_sast_iac_latest category: ci_templates redis_slot: ci_templates @@ -639,6 +643,10 @@ category: ci_templates redis_slot: ci_templates aggregation: weekly +- name: p_ci_templates_implicit_security_fortify_fod_sast + category: ci_templates + redis_slot: ci_templates + aggregation: weekly - name: p_ci_templates_implicit_security_sast_iac_latest category: ci_templates redis_slot: 
ci_templates diff --git a/lib/gitlab/usage_data_counters/known_events/code_review_events.yml b/lib/gitlab/usage_data_counters/known_events/code_review_events.yml index 267b7fe673d..c21b99ba834 100644 --- a/lib/gitlab/usage_data_counters/known_events/code_review_events.yml +++ b/lib/gitlab/usage_data_counters/known_events/code_review_events.yml @@ -299,3 +299,154 @@ redis_slot: code_review category: code_review aggregation: weekly +# MR Widget Extensions +## Test Summary +- name: i_code_review_merge_request_widget_test_summary_view + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_test_summary_full_report_clicked + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_test_summary_expand + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_test_summary_expand_success + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_test_summary_expand_warning + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_test_summary_expand_failed + redis_slot: code_review + category: code_review + aggregation: weekly +## Accessibility +- name: i_code_review_merge_request_widget_accessibility_view + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_accessibility_full_report_clicked + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_accessibility_expand + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_accessibility_expand_success + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_accessibility_expand_warning + redis_slot: 
code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_accessibility_expand_failed + redis_slot: code_review + category: code_review + aggregation: weekly +## Code Quality +- name: i_code_review_merge_request_widget_code_quality_view + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_code_quality_full_report_clicked + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_code_quality_expand + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_code_quality_expand_success + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_code_quality_expand_warning + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_code_quality_expand_failed + redis_slot: code_review + category: code_review + aggregation: weekly +## Terraform +- name: i_code_review_merge_request_widget_terraform_view + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_terraform_full_report_clicked + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_terraform_expand + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_terraform_expand_success + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_terraform_expand_warning + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_terraform_expand_failed + redis_slot: code_review + category: code_review + aggregation: weekly +## Metrics +- name: i_code_review_merge_request_widget_metrics_view + redis_slot: 
code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_metrics_full_report_clicked + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_metrics_expand + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_metrics_expand_success + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_metrics_expand_warning + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_metrics_expand_failed + redis_slot: code_review + category: code_review + aggregation: weekly +## Status Checks +- name: i_code_review_merge_request_widget_status_checks_view + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_status_checks_full_report_clicked + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_status_checks_expand + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_status_checks_expand_success + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_status_checks_expand_warning + redis_slot: code_review + category: code_review + aggregation: weekly +- name: i_code_review_merge_request_widget_status_checks_expand_failed + redis_slot: code_review + category: code_review + aggregation: weekly diff --git a/lib/gitlab/usage_data_counters/known_events/common.yml b/lib/gitlab/usage_data_counters/known_events/common.yml index 88c9f44c165..6c4754ae19f 100644 --- a/lib/gitlab/usage_data_counters/known_events/common.yml +++ b/lib/gitlab/usage_data_counters/known_events/common.yml @@ -1,25 +1,5 @@ --- # Compliance category -- name: g_compliance_dashboard - redis_slot: 
compliance - category: compliance - aggregation: weekly -- name: g_compliance_audit_events - category: compliance - redis_slot: compliance - aggregation: weekly -- name: i_compliance_audit_events - category: compliance - redis_slot: compliance - aggregation: weekly -- name: i_compliance_credential_inventory - category: compliance - redis_slot: compliance - aggregation: weekly -- name: a_compliance_audit_events_api - category: compliance - redis_slot: compliance - aggregation: weekly - name: g_edit_by_web_ide category: ide_edit redis_slot: edit @@ -44,14 +24,6 @@ category: search redis_slot: search aggregation: weekly -- name: i_search_advanced - category: search - redis_slot: search - aggregation: weekly -- name: i_search_paid - category: search - redis_slot: search - aggregation: weekly - name: wiki_action category: source_code aggregation: daily @@ -142,6 +114,19 @@ redis_slot: incident_management category: incident_management aggregation: weekly +# Incident management linked resources +- name: incident_management_issuable_resource_link_created + redis_slot: incident_management + category: incident_management + aggregation: weekly +- name: incident_management_issuable_resource_link_deleted + redis_slot: incident_management + category: incident_management + aggregation: weekly +- name: incident_management_issuable_resource_link_visited + redis_slot: incident_management + category: incident_management + aggregation: weekly # Incident management alerts - name: incident_management_alert_create_incident redis_slot: incident_management diff --git a/lib/gitlab/usage_data_counters/known_events/kubernetes_agent.yml b/lib/gitlab/usage_data_counters/known_events/kubernetes_agent.yml new file mode 100644 index 00000000000..e1de74a3d07 --- /dev/null +++ b/lib/gitlab/usage_data_counters/known_events/kubernetes_agent.yml @@ -0,0 +1,5 @@ +- name: agent_users_using_ci_tunnel + category: kubernetes_agent + redis_slot: agent + aggregation: weekly + feature_flag: 
track_agent_users_using_ci_tunnel diff --git a/lib/gitlab/usage_data_counters/known_events/work_items.yml b/lib/gitlab/usage_data_counters/known_events/work_items.yml index 0c9c6026c46..6cd7836ea94 100644 --- a/lib/gitlab/usage_data_counters/known_events/work_items.yml +++ b/lib/gitlab/usage_data_counters/known_events/work_items.yml @@ -9,3 +9,8 @@ redis_slot: users aggregation: weekly feature_flag: track_work_items_activity +- name: users_updating_work_item_dates + category: work_items + redis_slot: users + aggregation: weekly + feature_flag: track_work_items_activity diff --git a/lib/gitlab/usage_data_counters/merge_request_widget_extension_counter.rb b/lib/gitlab/usage_data_counters/merge_request_widget_extension_counter.rb new file mode 100644 index 00000000000..dafc36ab7ce --- /dev/null +++ b/lib/gitlab/usage_data_counters/merge_request_widget_extension_counter.rb @@ -0,0 +1,19 @@ +# frozen_string_literal: true + +module Gitlab + module UsageDataCounters + class MergeRequestWidgetExtensionCounter < BaseCounter + KNOWN_EVENTS = %w[view full_report_clicked expand expand_success expand_warning expand_failed].freeze + PREFIX = 'i_code_review_merge_request_widget' + WIDGETS = %w[accessibility code_quality status_checks terraform test_summary metrics].freeze + + class << self + private + + def known_events + self::WIDGETS.product(self::KNOWN_EVENTS).map { |name_parts| name_parts.join('_count_') } + end + end + end + end +end diff --git a/lib/gitlab/usage_data_counters/work_item_activity_unique_counter.rb b/lib/gitlab/usage_data_counters/work_item_activity_unique_counter.rb index 51bca8b51fe..99b4c082310 100644 --- a/lib/gitlab/usage_data_counters/work_item_activity_unique_counter.rb +++ b/lib/gitlab/usage_data_counters/work_item_activity_unique_counter.rb @@ -5,6 +5,7 @@ module Gitlab module WorkItemActivityUniqueCounter WORK_ITEM_CREATED = 'users_creating_work_items' WORK_ITEM_TITLE_CHANGED = 'users_updating_work_item_title' + WORK_ITEM_DATE_CHANGED = 
'users_updating_work_item_dates' class << self def track_work_item_created_action(author:) @@ -15,6 +16,10 @@ module Gitlab track_unique_action(WORK_ITEM_TITLE_CHANGED, author) end + def track_work_item_date_changed_action(author:) + track_unique_action(WORK_ITEM_DATE_CHANGED, author) + end + private def track_unique_action(action, author) diff --git a/lib/gitlab/usage_data_queries.rb b/lib/gitlab/usage_data_queries.rb index fef5cd680cb..c2983779603 100644 --- a/lib/gitlab/usage_data_queries.rb +++ b/lib/gitlab/usage_data_queries.rb @@ -53,7 +53,7 @@ module Gitlab end def alt_usage_data(value = nil, fallback: FALLBACK, &block) - if block_given? + if block { alt_usage_data_block: "non-SQL usage data block" } else { alt_usage_data_value: value } @@ -61,7 +61,7 @@ module Gitlab end def redis_usage_data(counter = nil, &block) - if block_given? + if block { redis_usage_data_block: "non-SQL usage data block" } elsif counter.present? { redis_usage_data_counter: counter.to_s } diff --git a/lib/gitlab/utils/batch_loader.rb b/lib/gitlab/utils/batch_loader.rb new file mode 100644 index 00000000000..67ade0633e2 --- /dev/null +++ b/lib/gitlab/utils/batch_loader.rb @@ -0,0 +1,23 @@ +# frozen_string_literal: true + +module Gitlab + module Utils + module BatchLoader + # Clears batched items under the specified batch key + # https://github.com/exAspArk/batch-loader#batch-key + def self.clear_key(batch_key) + return if ::BatchLoader::Executor.current.nil? 
+ + items_to_clear = ::BatchLoader::Executor.current.items_by_block.select do |k, v| + # The Hash key here is [source_location, batch_key], so we just check k[1] + k[1] == batch_key + end + + items_to_clear.each do |k, v| + ::BatchLoader::Executor.current.items_by_block.delete(k) + ::BatchLoader::Executor.current.loaded_values_by_block.delete(k) + end + end + end + end +end diff --git a/lib/gitlab/utils/link_header_parser.rb b/lib/gitlab/utils/link_header_parser.rb new file mode 100644 index 00000000000..d98c237baf3 --- /dev/null +++ b/lib/gitlab/utils/link_header_parser.rb @@ -0,0 +1,46 @@ +# frozen_string_literal: true + +module Gitlab + module Utils + # Parses Link http headers (as defined in https://www.rfc-editor.org/rfc/rfc5988.txt) + # + # The URI-references with their relation type are extracted and returned as a hash + # Example: + # + # header = '<http://test.org/TheBook/chapter2>; rel="previous", <http://test.org/TheBook/chapter4>; rel="next"' + # + # Gitlab::Utils::LinkHeaderParser.new(header).parse + # { + # previous: { + # uri: #<URI::HTTP http://test.org/TheBook/chapter2> + # }, + # next: { + # uri: #<URI::HTTP http://test.org/TheBook/chapter4> + # } + # } + class LinkHeaderParser + REL_PATTERN = %r{rel="(\w+)"}.freeze + # to avoid parse really long URIs we limit the amount of characters allowed + URI_PATTERN = %r{<(.{1,500})>}.freeze + + def initialize(header) + @header = header + end + + def parse + return {} if @header.blank? 
+ + links = @header.split(',') + result = {} + links.each do |link| + direction = link[REL_PATTERN, 1]&.to_sym + uri = link[URI_PATTERN, 1] + + result[direction] = { uri: URI(uri) } if direction && uri + end + + result + end + end + end +end diff --git a/lib/gitlab/utils/strong_memoize.rb b/lib/gitlab/utils/strong_memoize.rb index 3c954f817a7..50b8428113d 100644 --- a/lib/gitlab/utils/strong_memoize.rb +++ b/lib/gitlab/utils/strong_memoize.rb @@ -21,6 +21,20 @@ module Gitlab # end # end # + # Or like: + # + # include Gitlab::Utils::StrongMemoize + # + # def trigger_from_token + # Ci::Trigger.find_by_token(params[:token].to_s) + # end + # strong_memoize_attr :trigger_from_token + # + # strong_memoize_attr :enabled?, :enabled + # def enabled? + # Feature.enabled?(:some_feature) + # end + # def strong_memoize(name) key = ivar(name) @@ -40,6 +54,34 @@ module Gitlab remove_instance_variable(key) if instance_variable_defined?(key) end + module StrongMemoizeClassMethods + def strong_memoize_attr(method_name, member_name = nil) + member_name ||= method_name + + if method_defined?(method_name) || private_method_defined?(method_name) + StrongMemoize.send( # rubocop:disable GitlabSecurity/PublicSend + :do_strong_memoize, self, method_name, member_name) + else + StrongMemoize.send( # rubocop:disable GitlabSecurity/PublicSend + :queue_strong_memoize, self, method_name, member_name) + end + end + + def method_added(method_name) + super + + if member_name = StrongMemoize + .send(:strong_memoize_queue, self).delete(method_name) # rubocop:disable GitlabSecurity/PublicSend + StrongMemoize.send( # rubocop:disable GitlabSecurity/PublicSend + :do_strong_memoize, self, method_name, member_name) + end + end + end + + def self.included(base) + base.singleton_class.prepend(StrongMemoizeClassMethods) + end + private # Convert `"name"`/`:name` into `:@name` @@ -54,6 +96,37 @@ module Gitlab raise ArgumentError, "Invalid type of '#{name}'" end end + + class <<self + private + + def 
strong_memoize_queue(klass) + klass.instance_variable_get(:@strong_memoize_queue) || klass.instance_variable_set(:@strong_memoize_queue, {}) + end + + def queue_strong_memoize(klass, method_name, member_name) + strong_memoize_queue(klass)[method_name] = member_name + end + + def do_strong_memoize(klass, method_name, member_name) + method = klass.instance_method(method_name) + + # Methods defined within a class method are already public by default, so we don't need to + # explicitly make them public. + scope = %i[private protected].find do |scope| + klass.send("#{scope}_instance_methods") # rubocop:disable GitlabSecurity/PublicSend + .include? method_name + end + + klass.define_method(method_name) do |*args, &block| + strong_memoize(member_name) do + method.bind_call(self, *args, &block) + end + end + + klass.send(scope, method_name) if scope # rubocop:disable GitlabSecurity/PublicSend + end + end end end end diff --git a/lib/gitlab/utils/usage_data.rb b/lib/gitlab/utils/usage_data.rb index 4d1b234ae54..19bdeefed7e 100644 --- a/lib/gitlab/utils/usage_data.rb +++ b/lib/gitlab/utils/usage_data.rb @@ -196,7 +196,7 @@ module Gitlab def alt_usage_data(value = nil, fallback: FALLBACK, &block) with_duration do - if block_given? + if block yield else value @@ -209,7 +209,7 @@ module Gitlab def redis_usage_data(counter = nil, &block) with_duration do - if block_given? + if block redis_usage_counter(&block) elsif counter.present? redis_usage_data_totals(counter) diff --git a/lib/gitlab/version_info.rb b/lib/gitlab/version_info.rb index f967a12b959..61de003c28d 100644 --- a/lib/gitlab/version_info.rb +++ b/lib/gitlab/version_info.rb @@ -9,7 +9,7 @@ module Gitlab VERSION_REGEX = /(\d+)\.(\d+)\.(\d+)/.freeze def self.parse(str, parse_suffix: false) - if str.is_a?(self.class) + if str.is_a?(self) str elsif str && m = str.match(VERSION_REGEX) VersionInfo.new(m[1].to_i, m[2].to_i, m[3].to_i, parse_suffix ? 
m.post_match : nil) @@ -62,6 +62,10 @@ module Gitlab end end + def to_json(*_args) + { major: @major, minor: @minor, patch: @patch }.to_json + end + def suffix @suffix ||= @suffix_s.strip.gsub('-', '.pre.').scan(/\d+|[a-z]+/i).map do |s| /^\d+$/ =~ s ? s.to_i : s diff --git a/lib/gitlab/word_diff/segments/diff_hunk.rb b/lib/gitlab/word_diff/segments/diff_hunk.rb index 88b6817676f..13f71f2bc04 100644 --- a/lib/gitlab/word_diff/segments/diff_hunk.rb +++ b/lib/gitlab/word_diff/segments/diff_hunk.rb @@ -16,11 +16,15 @@ module Gitlab end def pos_old - line.match(/\-[0-9]*/)[0].to_i.abs rescue 0 + line.match(/\-[0-9]*/)[0].to_i.abs + rescue StandardError + 0 end def pos_new - line.match(/\+[0-9]*/)[0].to_i.abs rescue 0 + line.match(/\+[0-9]*/)[0].to_i.abs + rescue StandardError + 0 end def first_line? diff --git a/lib/gitlab/zentao/client.rb b/lib/gitlab/zentao/client.rb index 4da4631eecf..0c2b3049670 100644 --- a/lib/gitlab/zentao/client.rb +++ b/lib/gitlab/zentao/client.rb @@ -15,7 +15,11 @@ module Gitlab end def ping - response = fetch_product(zentao_product_xid) rescue {} + response = begin + fetch_product(zentao_product_xid) + rescue StandardError + {} + end active = response['deleted'] == '0' if active { success: true } |