gitlab.com/gitlab-org/gitlab-foss.git

-rw-r--r--  .gitlab/CODEOWNERS                                                    |   2
-rw-r--r--  app/assets/javascripts/issuable/popover/components/mr_popover.vue    |  10
-rw-r--r--  app/controllers/projects/prometheus/alerts_controller.rb             |   6
-rw-r--r--  app/models/hooks/web_hook_log.rb                                      |   7
-rw-r--r--  app/serializers/prometheus_alert_entity.rb                            |   4
-rw-r--r--  app/services/metrics/dashboard/base_service.rb                        |   1
-rw-r--r--  app/services/metrics/dashboard/panel_preview_service.rb               |   1
-rw-r--r--  app/services/metrics/dashboard/system_dashboard_service.rb           |   3
-rw-r--r--  app/services/service_response.rb                                      |  18
-rw-r--r--  app/services/web_hooks/destroy_service.rb                             |  70
-rw-r--r--  app/services/web_hooks/log_destroy_service.rb                         |  19
-rw-r--r--  app/workers/all_queues.yml                                            |   9
-rw-r--r--  app/workers/projects/inactive_projects_deletion_cron_worker.rb       |  67
-rw-r--r--  app/workers/web_hooks/destroy_worker.rb                               |   9
-rw-r--r--  app/workers/web_hooks/log_destroy_worker.rb                           |  24
-rw-r--r--  config/initializers/1_settings.rb                                     |   2
-rw-r--r--  config/routes/project.rb                                              |   2
-rw-r--r--  config/sidekiq_queues.yml                                             |   2
-rw-r--r--  doc/user/packages/container_registry/index.md                         |   2
-rw-r--r--  lib/api/api.rb                                                        |   1
-rw-r--r--  lib/api/internal/workhorse.rb                                         |  26
-rw-r--r--  lib/gitlab/metrics/dashboard/stages/alerts_inserter.rb               |  41
-rw-r--r--  spec/controllers/projects/prometheus/alerts_controller_spec.rb       |  51
-rw-r--r--  spec/frontend/issuable/popover/components/__snapshots__/mr_popover_spec.js.snap |  93
-rw-r--r--  spec/frontend/issuable/popover/components/mr_popover_spec.js         | 143
-rw-r--r--  spec/lib/gitlab/metrics/dashboard/processor_spec.rb                  |  38
-rw-r--r--  spec/models/hooks/web_hook_log_spec.rb                                |  56
-rw-r--r--  spec/requests/api/internal/workhorse_spec.rb                          |  42
-rw-r--r--  spec/serializers/prometheus_alert_entity_spec.rb                      |   4
-rw-r--r--  spec/services/metrics/dashboard/panel_preview_service_spec.rb        |   1
-rw-r--r--  spec/services/projects/destroy_service_spec.rb                        |   1
-rw-r--r--  spec/services/service_response_spec.rb                                |  75
-rw-r--r--  spec/services/web_hooks/destroy_service_spec.rb                       |  68
-rw-r--r--  spec/services/web_hooks/log_destroy_service_spec.rb                   |  56
-rw-r--r--  spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb |  42
-rw-r--r--  spec/workers/web_hooks/destroy_worker_spec.rb                         |  23
-rw-r--r--  spec/workers/web_hooks/log_destroy_worker_spec.rb                     |  86
-rw-r--r--  workhorse/internal/api/api.go                                         |  45
-rw-r--r--  workhorse/internal/upload/multipart_uploader.go                       |  12
-rw-r--r--  workhorse/internal/upload/rewrite.go                                  |  40
-rw-r--r--  workhorse/internal/upstream/routes.go                                 |   2
-rw-r--r--  workhorse/upload_test.go                                              |  17
42 files changed, 761 insertions(+), 460 deletions(-)
diff --git a/.gitlab/CODEOWNERS b/.gitlab/CODEOWNERS
index 89492338cfa..c026b22e3ee 100644
--- a/.gitlab/CODEOWNERS
+++ b/.gitlab/CODEOWNERS
@@ -792,7 +792,7 @@ lib/gitlab/checks/** @proglottis @toon @zj-gitlab
/doc/user/reserved_names.md @fneill
/doc/user/search/advanced_search.md @sselhorn
/doc/user/search/global_search/advanced_search_syntax.md @sselhorn
-/doc/user/search/index.md @aqualls
+/doc/user/search/index.md @sselhorn
/doc/user/shortcuts.md @aqualls
/doc/user/snippets.md @aqualls
/doc/user/ssh.md @eread
diff --git a/app/assets/javascripts/issuable/popover/components/mr_popover.vue b/app/assets/javascripts/issuable/popover/components/mr_popover.vue
index 8573dd702a6..92994809362 100644
--- a/app/assets/javascripts/issuable/popover/components/mr_popover.vue
+++ b/app/assets/javascripts/issuable/popover/components/mr_popover.vue
@@ -1,6 +1,6 @@
<script>
/* eslint-disable @gitlab/vue-require-i18n-strings */
-import { GlBadge, GlPopover, GlDeprecatedSkeletonLoading as GlSkeletonLoading } from '@gitlab/ui';
+import { GlBadge, GlPopover, GlSkeletonLoader } from '@gitlab/ui';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import timeagoMixin from '~/vue_shared/mixins/timeago';
import { mrStates, humanMRStates } from '../constants';
@@ -12,7 +12,7 @@ export default {
components: {
GlBadge,
GlPopover,
- GlSkeletonLoading,
+ GlSkeletonLoader,
CiIcon,
},
mixins: [timeagoMixin],
@@ -93,9 +93,9 @@ export default {
<template>
<gl-popover :target="target" boundary="viewport" placement="top" show>
<div class="mr-popover">
- <div v-if="$apollo.queries.mergeRequest.loading">
- <gl-skeleton-loading :lines="1" class="animation-container-small mt-1" />
- </div>
+ <gl-skeleton-loader v-if="$apollo.queries.mergeRequest.loading" :height="15">
+ <rect width="250" height="15" rx="4" />
+ </gl-skeleton-loader>
<div v-else-if="showDetails" class="d-flex align-items-center justify-content-between">
<div class="d-inline-flex align-items-center">
<gl-badge class="gl-mr-3" :variant="badgeVariant">
diff --git a/app/controllers/projects/prometheus/alerts_controller.rb b/app/controllers/projects/prometheus/alerts_controller.rb
index de0f9b25f14..c3dc17694d9 100644
--- a/app/controllers/projects/prometheus/alerts_controller.rb
+++ b/app/controllers/projects/prometheus/alerts_controller.rb
@@ -14,15 +14,11 @@ module Projects
prepend_before_action :repository, :project_without_auth, only: [:notify]
before_action :authorize_read_prometheus_alerts!, except: [:notify]
- before_action :alert, only: [:show, :metrics_dashboard]
+ before_action :alert, only: [:metrics_dashboard]
feature_category :incident_management
urgency :low
- def show
- render json: serialize_as_json(alert)
- end
-
def notify
token = extract_alert_manager_token(request)
result = notify_service.execute(token)
diff --git a/app/models/hooks/web_hook_log.rb b/app/models/hooks/web_hook_log.rb
index 8c0565e4a38..a95dd0473b6 100644
--- a/app/models/hooks/web_hook_log.rb
+++ b/app/models/hooks/web_hook_log.rb
@@ -26,6 +26,13 @@ class WebHookLog < ApplicationRecord
.order(created_at: :desc)
end
+ # Delete a batch of log records. Returns true if there may be more remaining.
+ def self.delete_batch_for(web_hook, batch_size:)
+ raise ArgumentError, 'batch_size is too small' if batch_size < 1
+
+ where(web_hook: web_hook).limit(batch_size).delete_all == batch_size
+ end
+
def success?
response_status =~ /^2/
end
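
The contract here — true while a full batch was deleted, false once a short batch signals the table is drained — is what lets callers loop without a separate count query. A minimal sketch of the intended call pattern (the caller-side names are illustrative):

```ruby
# Sketch: drain all logs for a hook in fixed-size batches.
# delete_batch_for returns true only when it deleted exactly
# `batch_size` rows, meaning more rows may remain; a short or
# empty batch returns false and ends the loop.
loop do
  break unless WebHookLog.delete_batch_for(web_hook, batch_size: 1000)
end
```
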
diff --git a/app/serializers/prometheus_alert_entity.rb b/app/serializers/prometheus_alert_entity.rb
index 92905d2b389..fb25889e4db 100644
--- a/app/serializers/prometheus_alert_entity.rb
+++ b/app/serializers/prometheus_alert_entity.rb
@@ -13,10 +13,6 @@ class PrometheusAlertEntity < Grape::Entity
prometheus_alert.computed_operator
end
- expose :alert_path do |prometheus_alert|
- project_prometheus_alert_path(prometheus_alert.project, prometheus_alert.prometheus_metric_id, environment_id: prometheus_alert.environment.id, format: :json)
- end
-
private
alias_method :prometheus_alert, :object
diff --git a/app/services/metrics/dashboard/base_service.rb b/app/services/metrics/dashboard/base_service.rb
index 5be8ae62548..5975fa28b0b 100644
--- a/app/services/metrics/dashboard/base_service.rb
+++ b/app/services/metrics/dashboard/base_service.rb
@@ -14,7 +14,6 @@ module Metrics
STAGES::VariableEndpointInserter,
STAGES::PanelIdsInserter,
STAGES::TrackPanelType,
- STAGES::AlertsInserter,
STAGES::UrlValidator
].freeze
diff --git a/app/services/metrics/dashboard/panel_preview_service.rb b/app/services/metrics/dashboard/panel_preview_service.rb
index 02dd908e229..260b49a5b19 100644
--- a/app/services/metrics/dashboard/panel_preview_service.rb
+++ b/app/services/metrics/dashboard/panel_preview_service.rb
@@ -10,7 +10,6 @@ module Metrics
::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
::Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
::Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::AlertsInserter,
::Gitlab::Metrics::Dashboard::Stages::UrlValidator
].freeze
diff --git a/app/services/metrics/dashboard/system_dashboard_service.rb b/app/services/metrics/dashboard/system_dashboard_service.rb
index 29b8f23f40d..1bd31b2ba21 100644
--- a/app/services/metrics/dashboard/system_dashboard_service.rb
+++ b/app/services/metrics/dashboard/system_dashboard_service.rb
@@ -17,8 +17,7 @@ module Metrics
STAGES::CustomMetricsDetailsInserter,
STAGES::MetricEndpointInserter,
STAGES::VariableEndpointInserter,
- STAGES::PanelIdsInserter,
- STAGES::AlertsInserter
+ STAGES::PanelIdsInserter
].freeze
class << self
diff --git a/app/services/service_response.rb b/app/services/service_response.rb
index 6bc394d2ae2..c7ab75a4426 100644
--- a/app/services/service_response.rb
+++ b/app/services/service_response.rb
@@ -18,6 +18,24 @@ class ServiceResponse
self.http_status = http_status
end
+ def track_exception(as: StandardError, **extra_data)
+ if error?
+ e = as.new(message)
+ Gitlab::ErrorTracking.track_exception(e, extra_data)
+ end
+
+ self
+ end
+
+ def track_and_raise_exception(as: StandardError, **extra_data)
+ if error?
+ e = as.new(message)
+ Gitlab::ErrorTracking.track_and_raise_exception(e, extra_data)
+ end
+
+ self
+ end
+
def [](key)
to_h[key]
end
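
Both new helpers are no-ops on success and return `self` either way, so a caller can tack them onto the end of a service call. A hedged usage sketch (the error class is illustrative):

```ruby
# Sketch: only error responses are reported to Gitlab::ErrorTracking.
CleanupError = Class.new(StandardError)

result = ServiceResponse.error(message: 'bang')
result.track_exception(as: CleanupError, web_hook_id: 42) # reports, returns result
result.track_and_raise_exception(as: CleanupError)        # reports, then raises CleanupError

ServiceResponse.success.track_exception # no-op, returns the success response
```
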
diff --git a/app/services/web_hooks/destroy_service.rb b/app/services/web_hooks/destroy_service.rb
index 58117985b56..ecb530f0d2a 100644
--- a/app/services/web_hooks/destroy_service.rb
+++ b/app/services/web_hooks/destroy_service.rb
@@ -2,77 +2,27 @@
module WebHooks
class DestroyService
- include BaseServiceUtility
-
- BATCH_SIZE = 1000
- LOG_COUNT_THRESHOLD = 10000
-
- DestroyError = Class.new(StandardError)
-
- attr_accessor :current_user, :web_hook
+ attr_accessor :current_user
def initialize(current_user)
@current_user = current_user
end
+ # Destroy the hook immediately and schedule the logs for deletion
def execute(web_hook)
- @web_hook = web_hook
-
- async = false
- # For a better user experience, it's better if the Web hook is
- # destroyed right away without waiting for Sidekiq. However, if
- # there are a lot of Web hook logs, we will need more time to
- # clean them up, so schedule a Sidekiq job to do this.
- if needs_async_destroy?
- Gitlab::AppLogger.info("User #{current_user&.id} scheduled a deletion of hook ID #{web_hook.id}")
- async_destroy(web_hook)
- async = true
- else
- sync_destroy(web_hook)
- end
-
- success({ async: async })
- end
-
- def sync_destroy(web_hook)
- @web_hook = web_hook
+ hook_id = web_hook.id
- delete_web_hook_logs
- result = web_hook.destroy
+ if web_hook.destroy
+ WebHooks::LogDestroyWorker.perform_async({ 'hook_id' => hook_id })
+ Gitlab::AppLogger.info("User #{current_user&.id} scheduled a deletion of logs for hook ID #{hook_id}")
- if result
- success({ async: false })
+ ServiceResponse.success(payload: { async: false })
else
- error("Unable to destroy #{web_hook.model_name.human}")
+ ServiceResponse.error(message: "Unable to destroy #{web_hook.model_name.human}")
end
end
- private
-
- def async_destroy(web_hook)
- WebHooks::DestroyWorker.perform_async(current_user.id, web_hook.id)
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def needs_async_destroy?
- web_hook.web_hook_logs.limit(LOG_COUNT_THRESHOLD).count == LOG_COUNT_THRESHOLD
- end
- # rubocop: enable CodeReuse/ActiveRecord
-
- def delete_web_hook_logs
- loop do
- count = delete_web_hook_logs_in_batches
- break if count < BATCH_SIZE
- end
- end
-
- # rubocop: disable CodeReuse/ActiveRecord
- def delete_web_hook_logs_in_batches
- # We can't use EachBatch because that does an ORDER BY id, which can
- # easily time out. We don't actually care about ordering when
- # we are deleting these rows.
- web_hook.web_hook_logs.limit(BATCH_SIZE).delete_all
- end
- # rubocop: enable CodeReuse/ActiveRecord
+ # Backwards compatibility with WebHooks::DestroyWorker
+ alias_method :sync_destroy, :execute
end
end
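
With this change the hook row is always removed synchronously and only the log cleanup is deferred, so the `async` flag in the payload is always false. A sketch from the caller's side:

```ruby
# Sketch: the hook disappears inline; its logs are purged by a worker.
response = WebHooks::DestroyService.new(current_user).execute(web_hook)

if response.success?
  response.payload[:async] # => false; WebHooks::LogDestroyWorker was enqueued
else
  response.message # => "Unable to destroy <humanized model name>"
end
```
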
diff --git a/app/services/web_hooks/log_destroy_service.rb b/app/services/web_hooks/log_destroy_service.rb
new file mode 100644
index 00000000000..8a5d214a95e
--- /dev/null
+++ b/app/services/web_hooks/log_destroy_service.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module WebHooks
+ class LogDestroyService
+ BATCH_SIZE = 1000
+
+ def initialize(web_hook_id)
+ @web_hook_id = web_hook_id
+ end
+
+ def execute
+ next while WebHookLog.delete_batch_for(@web_hook_id, batch_size: BATCH_SIZE)
+
+ ServiceResponse.success
+ rescue StandardError => ex
+ ServiceResponse.error(message: ex.message)
+ end
+ end
+end
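
Note the division of labour: the service never raises — `next while` simply loops until `delete_batch_for` returns false, and any exception is folded into an error response. Re-raising (so Sidekiq can retry) is left to the calling worker, as sketched here:

```ruby
# Sketch: the service reports failure as data, not as an exception...
result = WebHooks::LogDestroyService.new(hook_id).execute
result.error? # => true if a batch delete raised

# ...and the worker converts it back into a raised, tracked exception
# (mirroring WebHooks::LogDestroyWorker further down in this diff).
result.track_and_raise_exception(as: StandardError, web_hook_id: hook_id)
```
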
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index e7ea39c913e..87a621980c7 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -3056,6 +3056,15 @@
:weight: 1
:idempotent: true
:tags: []
+- :name: web_hooks_log_destroy
+ :worker_name: WebHooks::LogDestroyWorker
+ :feature_category: :integrations
+ :has_external_dependencies:
+ :urgency: :low
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent: true
+ :tags: []
- :name: web_hooks_log_execution
:worker_name: WebHooks::LogExecutionWorker
:feature_category: :integrations
diff --git a/app/workers/projects/inactive_projects_deletion_cron_worker.rb b/app/workers/projects/inactive_projects_deletion_cron_worker.rb
index 2c3f4191502..312d161cd9b 100644
--- a/app/workers/projects/inactive_projects_deletion_cron_worker.rb
+++ b/app/workers/projects/inactive_projects_deletion_cron_worker.rb
@@ -9,34 +9,53 @@ module Projects
idempotent!
data_consistency :always
feature_category :compliance_management
+ urgency :low
- INTERVAL = 2.seconds.to_i
+ # This cron worker is executed at an interval of 10 minutes.
+ # Maximum run time is kept at 4 minutes to avoid breaching the maximum allowed execution latency of 5 minutes.
+ MAX_RUN_TIME = 4.minutes
+ LAST_PROCESSED_INACTIVE_PROJECT_REDIS_KEY = 'last_processed_inactive_project_id'
+
+ TimeoutError = Class.new(StandardError)
def perform
return unless ::Gitlab::CurrentSettings.delete_inactive_projects?
+ @start_time ||= ::Gitlab::Metrics::System.monotonic_time
admin_user = User.admins.active.first
return unless admin_user
notified_inactive_projects = Gitlab::InactiveProjectsDeletionWarningTracker.notified_projects
- Project.inactive.without_deleted.find_each(batch_size: 100).with_index do |project, index| # rubocop: disable CodeReuse/ActiveRecord
- next unless Feature.enabled?(:inactive_projects_deletion, project.root_namespace)
+ project_id = last_processed_project_id
+
+ Project.where('projects.id > ?', project_id).each_batch(of: 100) do |batch| # rubocop: disable CodeReuse/ActiveRecord
+ inactive_projects = batch.inactive.without_deleted
- delay = index * INTERVAL
+ inactive_projects.each do |project|
+ next unless Feature.enabled?(:inactive_projects_deletion, project.root_namespace)
- with_context(project: project, user: admin_user) do
- deletion_warning_email_sent_on = notified_inactive_projects["project:#{project.id}"]
+ with_context(project: project, user: admin_user) do
+ deletion_warning_email_sent_on = notified_inactive_projects["project:#{project.id}"]
+
+ if send_deletion_warning_email?(deletion_warning_email_sent_on, project)
+ send_notification(project, admin_user)
+ elsif deletion_warning_email_sent_on && delete_due_to_inactivity?(deletion_warning_email_sent_on)
+ Gitlab::InactiveProjectsDeletionWarningTracker.new(project.id).reset
+ delete_project(project, admin_user)
+ end
+ end
- if send_deletion_warning_email?(deletion_warning_email_sent_on, project)
- send_notification(delay, project, admin_user)
- elsif deletion_warning_email_sent_on && delete_due_to_inactivity?(deletion_warning_email_sent_on)
- Gitlab::InactiveProjectsDeletionWarningTracker.new(project.id).reset
- delete_project(project, admin_user)
+ if over_time?
+ save_last_processed_project_id(project.id)
+ raise TimeoutError
end
end
end
+ reset_last_processed_project_id
+ rescue TimeoutError
+ # no-op
end
private
@@ -64,8 +83,30 @@ module Projects
::Projects::DestroyService.new(project, user, {}).async_execute
end
- def send_notification(delay, project, user)
- ::Projects::InactiveProjectsDeletionNotificationWorker.perform_in(delay, project.id, deletion_date)
+ def send_notification(project, user)
+ ::Projects::InactiveProjectsDeletionNotificationWorker.perform_async(project.id, deletion_date)
+ end
+
+ def over_time?
+ (::Gitlab::Metrics::System.monotonic_time - @start_time) > MAX_RUN_TIME
+ end
+
+ def save_last_processed_project_id(project_id)
+ Gitlab::Redis::Cache.with do |redis|
+ redis.set(LAST_PROCESSED_INACTIVE_PROJECT_REDIS_KEY, project_id)
+ end
+ end
+
+ def last_processed_project_id
+ Gitlab::Redis::Cache.with do |redis|
+ redis.get(LAST_PROCESSED_INACTIVE_PROJECT_REDIS_KEY).to_i
+ end
+ end
+
+ def reset_last_processed_project_id
+ Gitlab::Redis::Cache.with do |redis|
+ redis.del(LAST_PROCESSED_INACTIVE_PROJECT_REDIS_KEY)
+ end
end
end
end
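
The mechanism above is a time-boxed, resumable scan: projects are walked in id order, the 4-minute budget is checked as projects are processed, and on timeout the current id is checkpointed in Redis so the next run (10 minutes later) resumes where this one stopped. A condensed sketch of just that mechanism (`redis` stands in for `Gitlab::Redis::Cache.with`, per-project work is elided):

```ruby
# Sketch: resume-from-checkpoint batch processing under a time budget.
start_time = ::Gitlab::Metrics::System.monotonic_time
cursor = redis.get('last_processed_inactive_project_id').to_i

Project.where('projects.id > ?', cursor).each_batch(of: 100) do |batch|
  batch.each do |project|
    process(project) # hypothetical per-project work

    if ::Gitlab::Metrics::System.monotonic_time - start_time > 4.minutes
      redis.set('last_processed_inactive_project_id', project.id)
      raise TimeoutError # rescued by the worker; the next run resumes here
    end
  end
end

redis.del('last_processed_inactive_project_id') # full pass completed
```
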
diff --git a/app/workers/web_hooks/destroy_worker.rb b/app/workers/web_hooks/destroy_worker.rb
index 822a1e770d7..8f9b194f88a 100644
--- a/app/workers/web_hooks/destroy_worker.rb
+++ b/app/workers/web_hooks/destroy_worker.rb
@@ -4,6 +4,8 @@ module WebHooks
class DestroyWorker
include ApplicationWorker
+ DestroyError = Class.new(StandardError)
+
data_consistency :always
sidekiq_options retry: 3
feature_category :integrations
@@ -19,12 +21,7 @@ module WebHooks
result = ::WebHooks::DestroyService.new(user).sync_destroy(hook)
- return result if result[:status] == :success
-
- e = ::WebHooks::DestroyService::DestroyError.new(result[:message])
- Gitlab::ErrorTracking.track_exception(e, web_hook_id: hook.id)
-
- raise e
+ result.track_and_raise_exception(as: DestroyError, web_hook_id: hook.id)
end
end
end
diff --git a/app/workers/web_hooks/log_destroy_worker.rb b/app/workers/web_hooks/log_destroy_worker.rb
new file mode 100644
index 00000000000..9ea5c70e416
--- /dev/null
+++ b/app/workers/web_hooks/log_destroy_worker.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module WebHooks
+ class LogDestroyWorker
+ include ApplicationWorker
+
+ DestroyError = Class.new(StandardError)
+
+ data_consistency :always
+ feature_category :integrations
+ urgency :low
+
+ idempotent!
+
+ def perform(params = {})
+ hook_id = params['hook_id']
+ return unless hook_id
+
+ result = ::WebHooks::LogDestroyService.new(hook_id).execute
+
+ result.track_and_raise_exception(as: DestroyError, web_hook_id: hook_id)
+ end
+ end
+end
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 0bcaf0a1454..b38f4b306b3 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -627,7 +627,7 @@ Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_wo
Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_worker']['cron'] ||= '2/17 * * * *'
Settings.cron_jobs['projects_schedule_refresh_build_artifacts_size_statistics_worker']['job_class'] = 'Projects::ScheduleRefreshBuildArtifactsSizeStatisticsWorker'
Settings.cron_jobs['inactive_projects_deletion_cron_worker'] ||= Settingslogic.new({})
-Settings.cron_jobs['inactive_projects_deletion_cron_worker']['cron'] ||= '0 1 * * *'
+Settings.cron_jobs['inactive_projects_deletion_cron_worker']['cron'] ||= '*/10 * * * *'
Settings.cron_jobs['inactive_projects_deletion_cron_worker']['job_class'] = 'Projects::InactiveProjectsDeletionCronWorker'
Settings.cron_jobs['loose_foreign_keys_cleanup_worker'] ||= Settingslogic.new({})
Settings.cron_jobs['loose_foreign_keys_cleanup_worker']['cron'] ||= '*/1 * * * *'
diff --git a/config/routes/project.rb b/config/routes/project.rb
index e8c7333b261..a4a3e55f711 100644
--- a/config/routes/project.rb
+++ b/config/routes/project.rb
@@ -522,7 +522,7 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
end
namespace :prometheus do
- resources :alerts, constraints: { id: /\d+/ }, only: [:show] do # rubocop: disable Cop/PutProjectRoutesUnderScope
+ resources :alerts, constraints: { id: /\d+/ }, only: [] do # rubocop: disable Cop/PutProjectRoutesUnderScope
post :notify, on: :collection # rubocop:todo Cop/PutProjectRoutesUnderScope
member do
get :metrics_dashboard # rubocop:todo Cop/PutProjectRoutesUnderScope
diff --git a/config/sidekiq_queues.yml b/config/sidekiq_queues.yml
index f5d9708b55b..9f2eede1c47 100644
--- a/config/sidekiq_queues.yml
+++ b/config/sidekiq_queues.yml
@@ -457,6 +457,8 @@
- 1
- - web_hooks_destroy
- 1
+- - web_hooks_log_destroy
+ - 1
- - web_hooks_log_execution
- 1
- - wikis_git_garbage_collect
diff --git a/doc/user/packages/container_registry/index.md b/doc/user/packages/container_registry/index.md
index 0403bbae2b5..ae64c419632 100644
--- a/doc/user/packages/container_registry/index.md
+++ b/doc/user/packages/container_registry/index.md
@@ -113,7 +113,7 @@ To authenticate, you can use:
Both of these require the minimum scope to be:
- For read (pull) access, `read_registry`.
-- For write (push) access, `write_registry`.
+- For write (push) access, `write_registry` & `read_registry`.
To authenticate, run the `docker` command. For example:
diff --git a/lib/api/api.rb b/lib/api/api.rb
index 847199ef274..c20ca768d48 100644
--- a/lib/api/api.rb
+++ b/lib/api/api.rb
@@ -315,6 +315,7 @@ module API
mount ::API::Internal::Kubernetes
mount ::API::Internal::MailRoom
mount ::API::Internal::ContainerRegistry::Migration
+ mount ::API::Internal::Workhorse
version 'v3', using: :path do
# Although the following endpoints are kept behind V3 namespace,
diff --git a/lib/api/internal/workhorse.rb b/lib/api/internal/workhorse.rb
new file mode 100644
index 00000000000..6a7bc335fec
--- /dev/null
+++ b/lib/api/internal/workhorse.rb
@@ -0,0 +1,26 @@
+# frozen_string_literal: true
+
+module API
+ module Internal
+ class Workhorse < ::API::Base
+ feature_category :not_owned # rubocop:todo Gitlab/AvoidFeatureCategoryNotOwned
+
+ before do
+ verify_workhorse_api!
+ content_type Gitlab::Workhorse::INTERNAL_API_CONTENT_TYPE
+ end
+
+ namespace 'internal' do
+ namespace 'workhorse' do
+ post 'authorize_upload' do
+ authenticator = Gitlab::Auth::RequestAuthenticator.new(request)
+ unauthorized! unless authenticator.find_authenticated_requester([:api])
+
+ status 200
+ { TempPath: File.join(::Gitlab.config.uploads.storage_path, 'uploads/tmp') }
+ end
+ end
+ end
+ end
+ end
+end
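
Two authentication layers guard this endpoint: `verify_workhorse_api!` (the Workhorse↔Rails JWT) plus a normal authenticated API requester. A hedged sketch of exercising it, using the request-spec helpers that appear in the spec added further down in this diff:

```ruby
# Sketch: both layers must pass, mirroring spec/requests/api/internal/workhorse_spec.rb.
headers = workhorse_internal_api_request_header # signed Gitlab-Workhorse-Api-Request JWT
login_as(user)                                  # satisfies find_authenticated_requester([:api])

post api('/internal/workhorse/authorize_upload'), headers: headers

json_response['TempPath'] # => "<uploads storage path>/uploads/tmp"
```
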
diff --git a/lib/gitlab/metrics/dashboard/stages/alerts_inserter.rb b/lib/gitlab/metrics/dashboard/stages/alerts_inserter.rb
deleted file mode 100644
index 38736158c3b..00000000000
--- a/lib/gitlab/metrics/dashboard/stages/alerts_inserter.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'set'
-
-module Gitlab
- module Metrics
- module Dashboard
- module Stages
- class AlertsInserter < BaseStage
- include ::Gitlab::Utils::StrongMemoize
-
- def transform!
- return if metrics_with_alerts.empty?
-
- for_metrics do |metric|
- next unless metrics_with_alerts.include?(metric[:metric_id])
-
- metric[:alert_path] = alert_path(metric[:metric_id], project, params[:environment])
- end
- end
-
- private
-
- def metrics_with_alerts
- strong_memoize(:metrics_with_alerts) do
- alerts = ::Projects::Prometheus::AlertsFinder
- .new(project: project, environment: params[:environment])
- .execute
-
- Set.new(alerts.map(&:prometheus_metric_id))
- end
- end
-
- def alert_path(metric_id, project, environment)
- ::Gitlab::Routing.url_helpers.project_prometheus_alert_path(project, metric_id, environment_id: environment.id, format: :json)
- end
- end
- end
- end
- end
-end
diff --git a/spec/controllers/projects/prometheus/alerts_controller_spec.rb b/spec/controllers/projects/prometheus/alerts_controller_spec.rb
index af40eef6dda..2c2c8180143 100644
--- a/spec/controllers/projects/prometheus/alerts_controller_spec.rb
+++ b/spec/controllers/projects/prometheus/alerts_controller_spec.rb
@@ -53,57 +53,6 @@ RSpec.describe Projects::Prometheus::AlertsController do
end
end
- describe 'GET #show' do
- let(:alert) do
- create(:prometheus_alert,
- :with_runbook_url,
- project: project,
- environment: environment,
- prometheus_metric: metric)
- end
-
- def make_request(opts = {})
- get :show, params: request_params(
- opts,
- id: alert.prometheus_metric_id,
- environment_id: environment
- )
- end
-
- context 'when alert does not exist' do
- it 'returns not_found' do
- make_request(id: 0)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- context 'when alert exists' do
- let(:alert_params) do
- {
- 'id' => alert.id,
- 'title' => alert.title,
- 'query' => alert.query,
- 'operator' => alert.computed_operator,
- 'threshold' => alert.threshold,
- 'runbook_url' => alert.runbook_url,
- 'alert_path' => alert_path(alert)
- }
- end
-
- it 'renders the alert' do
- make_request
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to include(alert_params)
- end
-
- it_behaves_like 'unprivileged'
- it_behaves_like 'project non-specific environment', :not_found
- it_behaves_like 'project non-specific metric', :not_found
- end
- end
-
describe 'POST #notify' do
let(:alert_1) { build(:alert_management_alert, :prometheus, project: project) }
let(:alert_2) { build(:alert_management_alert, :prometheus, project: project) }
diff --git a/spec/frontend/issuable/popover/components/__snapshots__/mr_popover_spec.js.snap b/spec/frontend/issuable/popover/components/__snapshots__/mr_popover_spec.js.snap
deleted file mode 100644
index a03d8bf5bf4..00000000000
--- a/spec/frontend/issuable/popover/components/__snapshots__/mr_popover_spec.js.snap
+++ /dev/null
@@ -1,93 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`MR Popover loaded state matches the snapshot 1`] = `
-<gl-popover-stub
- boundary="viewport"
- cssclasses=""
- placement="top"
- show=""
- target=""
->
- <div
- class="mr-popover"
- >
- <div
- class="d-flex align-items-center justify-content-between"
- >
- <div
- class="d-inline-flex align-items-center"
- >
- <gl-badge-stub
- class="gl-mr-3"
- size="md"
- variant="success"
- >
-
- Open
-
- </gl-badge-stub>
-
- <span
- class="gl-text-secondary"
- >
- Opened
- <time>
- just now
- </time>
- </span>
- </div>
-
- <ci-icon-stub
- cssclasses=""
- size="16"
- status="[object Object]"
- />
- </div>
-
- <h5
- class="my-2"
- >
- Updated Title
- </h5>
-
- <div
- class="gl-text-secondary"
- >
-
- foo/bar!1
-
- </div>
- </div>
-</gl-popover-stub>
-`;
-
-exports[`MR Popover shows skeleton-loader while apollo is loading 1`] = `
-<gl-popover-stub
- boundary="viewport"
- cssclasses=""
- placement="top"
- show=""
- target=""
->
- <div
- class="mr-popover"
- >
- <div>
- <gl-skeleton-loading-stub
- class="animation-container-small mt-1"
- lines="1"
- />
- </div>
-
- <!---->
-
- <div
- class="gl-text-secondary"
- >
-
- foo/bar!1
-
- </div>
- </div>
-</gl-popover-stub>
-`;
diff --git a/spec/frontend/issuable/popover/components/mr_popover_spec.js b/spec/frontend/issuable/popover/components/mr_popover_spec.js
index e66c6e22947..5fdd1e6e8fc 100644
--- a/spec/frontend/issuable/popover/components/mr_popover_spec.js
+++ b/spec/frontend/issuable/popover/components/mr_popover_spec.js
@@ -1,80 +1,119 @@
+import { GlSkeletonLoader } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { nextTick } from 'vue';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
import MRPopover from '~/issuable/popover/components/mr_popover.vue';
+import mergeRequestQuery from '~/issuable/popover/queries/merge_request.query.graphql';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
describe('MR Popover', () => {
let wrapper;
- beforeEach(() => {
+ Vue.use(VueApollo);
+
+ const mrQueryResponse = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: '1',
+ mergeRequest: {
+ __typename: 'Merge Request',
+ id: 'gid://gitlab/Merge_Request/1',
+ createdAt: '2020-07-01T04:08:01Z',
+ state: 'opened',
+ title: 'MR title',
+ headPipeline: {
+ id: '1',
+ detailedStatus: {
+ id: '1',
+ icon: 'status_success',
+ group: 'success',
+ },
+ },
+ },
+ },
+ },
+ };
+
+ const mrQueryResponseWithoutDetailedStatus = {
+ data: {
+ project: {
+ __typename: 'Project',
+ id: '1',
+ mergeRequest: {
+ __typename: 'Merge Request',
+ id: 'gid://gitlab/Merge_Request/1',
+ createdAt: '2020-07-01T04:08:01Z',
+ state: 'opened',
+ title: 'MR title',
+ headPipeline: {
+ id: '1',
+ detailedStatus: null,
+ },
+ },
+ },
+ },
+ };
+
+ const mountComponent = ({
+ queryResponse = jest.fn().mockResolvedValue(mrQueryResponse),
+ } = {}) => {
wrapper = shallowMount(MRPopover, {
+ apolloProvider: createMockApollo([[mergeRequestQuery, queryResponse]]),
propsData: {
target: document.createElement('a'),
projectPath: 'foo/bar',
iid: '1',
- cachedTitle: 'MR Title',
- },
- mocks: {
- $apollo: {
- queries: {
- mergeRequest: {
- loading: false,
- },
- },
- },
+ cachedTitle: 'Cached Title',
},
});
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
});
- it('shows skeleton-loader while apollo is loading', async () => {
- wrapper.vm.$apollo.queries.mergeRequest.loading = true;
+ it('shows skeleton-loader while apollo is loading', () => {
+ mountComponent();
- await nextTick();
- expect(wrapper.element).toMatchSnapshot();
+ expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(true);
});
- describe('loaded state', () => {
- it('matches the snapshot', async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- mergeRequest: {
- title: 'Updated Title',
- state: 'opened',
- createdAt: new Date(),
- headPipeline: {
- detailedStatus: {
- group: 'success',
- status: 'status_success',
- },
- },
- },
- });
+ describe('when loaded', () => {
+ beforeEach(() => {
+ mountComponent();
+ return waitForPromises();
+ });
- await nextTick();
- expect(wrapper.element).toMatchSnapshot();
+ it('shows opened time', () => {
+ expect(wrapper.text()).toContain('Opened 4 days ago');
});
- it('does not show CI Icon if there is no pipeline data', async () => {
- // setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
- // eslint-disable-next-line no-restricted-syntax
- wrapper.setData({
- mergeRequest: {
- state: 'opened',
- headPipeline: null,
- stateHumanName: 'Open',
- title: 'Merge Request Title',
- createdAt: new Date(),
- },
- });
+ it('shows title', () => {
+ expect(wrapper.find('h5').text()).toBe(mrQueryResponse.data.project.mergeRequest.title);
+ });
- await nextTick();
- expect(wrapper.find(CiIcon).exists()).toBe(false);
+ it('shows reference', () => {
+ expect(wrapper.text()).toContain('foo/bar!1');
+ });
+
+ it('shows CI Icon if there is pipeline data', async () => {
+ expect(wrapper.findComponent(CiIcon).exists()).toBe(true);
+ });
+ });
+
+ describe('without detailed status', () => {
+ beforeEach(() => {
+ mountComponent({
+ queryResponse: jest.fn().mockResolvedValue(mrQueryResponseWithoutDetailedStatus),
+ });
+ return waitForPromises();
});
- it('falls back to cached MR title when request fails', async () => {
- await nextTick();
- expect(wrapper.text()).toContain('MR Title');
+ it('does not show CI icon if there is no pipeline data', async () => {
+ expect(wrapper.findComponent(CiIcon).exists()).toBe(false);
});
});
});
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
index 14a4c01fce3..52908a0b339 100644
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
@@ -16,7 +16,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter,
Gitlab::Metrics::Dashboard::Stages::CustomMetricsDetailsInserter,
Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
- Gitlab::Metrics::Dashboard::Stages::AlertsInserter,
Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
Gitlab::Metrics::Dashboard::Stages::UrlValidator
]
@@ -118,43 +117,6 @@ RSpec.describe Gitlab::Metrics::Dashboard::Processor do
end
end
- context 'when the dashboard references persisted metrics with alerts' do
- let!(:alert) do
- create(
- :prometheus_alert,
- environment: environment,
- project: project,
- prometheus_metric: persisted_metric
- )
- end
-
- shared_examples_for 'has saved alerts' do
- it 'includes an alert path' do
- target_metric = all_metrics.find { |metric| metric[:metric_id] == persisted_metric.id }
-
- expect(target_metric).to be_a Hash
- expect(target_metric).to include(:alert_path)
- expect(target_metric[:alert_path]).to include(
- project.path,
- persisted_metric.id.to_s,
- environment.id.to_s
- )
- end
- end
-
- context 'that are shared across projects' do
- let!(:persisted_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
-
- it_behaves_like 'has saved alerts'
- end
-
- context 'when the project has associated metrics' do
- let!(:persisted_metric) { create(:prometheus_metric, project: project, group: :business) }
-
- it_behaves_like 'has saved alerts'
- end
- end
-
context 'when there are no alerts' do
let!(:persisted_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
diff --git a/spec/models/hooks/web_hook_log_spec.rb b/spec/models/hooks/web_hook_log_spec.rb
index 9cfbb14e087..e1fea3318f6 100644
--- a/spec/models/hooks/web_hook_log_spec.rb
+++ b/spec/models/hooks/web_hook_log_spec.rb
@@ -48,6 +48,62 @@ RSpec.describe WebHookLog do
end
end
+ describe '.delete_batch_for' do
+ let(:hook) { create(:project_hook) }
+
+ before do
+ create_list(:web_hook_log, 3, web_hook: hook)
+ create_list(:web_hook_log, 3)
+ end
+
+ subject { described_class.delete_batch_for(hook, batch_size: batch_size) }
+
+ shared_examples 'deletes batch of web hook logs' do
+ it { is_expected.to be(batch_size <= 3) }
+
+ it 'deletes min(batch_size, total) records' do
+ deleted = [batch_size, 3].min
+
+ expect { subject }.to change(described_class, :count).by(-deleted)
+ end
+ end
+
+ context 'when the batch size is less than one' do
+ let(:batch_size) { 0 }
+
+ it 'raises an argument error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
+ context 'when the batch size is smaller than the total' do
+ let(:batch_size) { 2 }
+
+ include_examples 'deletes batch of web hook logs'
+ end
+
+ context 'when the batch size is equal to the total' do
+ let(:batch_size) { 3 }
+
+ include_examples 'deletes batch of web hook logs'
+ end
+
+ context 'when the batch size is greater than the total' do
+ let(:batch_size) { 1000 }
+
+ include_examples 'deletes batch of web hook logs'
+ end
+
+ it 'does not loop forever' do
+ batches = 0
+ batches += 1 while described_class.delete_batch_for(hook, batch_size: 1)
+
+ expect(hook.web_hook_logs).to be_none
+ expect(described_class.count).to eq 3
+ expect(batches).to eq 3 # true three times, stops at first false
+ end
+ end
+
describe '#success?' do
let(:web_hook_log) { build(:web_hook_log, response_status: status) }
diff --git a/spec/requests/api/internal/workhorse_spec.rb b/spec/requests/api/internal/workhorse_spec.rb
new file mode 100644
index 00000000000..e6836d8fef5
--- /dev/null
+++ b/spec/requests/api/internal/workhorse_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Internal::Workhorse do
+ include WorkhorseHelpers
+
+ context '/authorize_upload' do
+ let_it_be(:user) { create(:user) }
+
+ let(:headers) { {} }
+
+ subject { post(api('/internal/workhorse/authorize_upload'), headers: headers) }
+
+ def expect_status(status)
+ subject
+ expect(response).to have_gitlab_http_status(status)
+ end
+
+ context 'without workhorse internal header' do
+ it { expect_status(:forbidden) }
+ end
+
+ context 'with workhorse internal header' do
+ let(:headers) { workhorse_internal_api_request_header }
+
+ it { expect_status(:unauthorized) }
+
+ context 'as a logged in user' do
+ before do
+ login_as(user)
+ end
+
+ it { expect_status(:success) }
+ it 'returns the temp upload path' do
+ subject
+ expect(json_response['TempPath']).to eq(Rails.root.join('tmp/tests/public/uploads/tmp').to_s)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/serializers/prometheus_alert_entity_spec.rb b/spec/serializers/prometheus_alert_entity_spec.rb
index ae8c97401f8..91a1e3377c2 100644
--- a/spec/serializers/prometheus_alert_entity_spec.rb
+++ b/spec/serializers/prometheus_alert_entity_spec.rb
@@ -18,9 +18,5 @@ RSpec.describe PrometheusAlertEntity do
it 'exposes prometheus_alert attributes' do
expect(subject).to include(:id, :title, :query, :operator, :threshold, :runbook_url)
end
-
- it 'exposes alert_path' do
- expect(subject).to include(:alert_path)
- end
end
end
diff --git a/spec/services/metrics/dashboard/panel_preview_service_spec.rb b/spec/services/metrics/dashboard/panel_preview_service_spec.rb
index 2877d22d1f3..787c61cc918 100644
--- a/spec/services/metrics/dashboard/panel_preview_service_spec.rb
+++ b/spec/services/metrics/dashboard/panel_preview_service_spec.rb
@@ -45,7 +45,6 @@ RSpec.describe Metrics::Dashboard::PanelPreviewService do
::Gitlab::Metrics::Dashboard::Stages::CommonMetricsInserter,
::Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter,
::Gitlab::Metrics::Dashboard::Stages::PanelIdsInserter,
- ::Gitlab::Metrics::Dashboard::Stages::AlertsInserter,
::Gitlab::Metrics::Dashboard::Stages::UrlValidator
]
processor_params = [project, dashboard, sequence, environment: environment]
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index adea79509b3..86e9a176798 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -492,7 +492,6 @@ RSpec.describe Projects::DestroyService, :aggregate_failures, :event_store_publi
expect do
destroy_project(project, user)
end.to change(WebHook, :count).by(-2)
- .and change(WebHookLog, :count).by(-1)
end
context 'when an error is raised deleting webhooks' do
diff --git a/spec/services/service_response_spec.rb b/spec/services/service_response_spec.rb
index 082ee4ddc67..3ede90fbc44 100644
--- a/spec/services/service_response_spec.rb
+++ b/spec/services/service_response_spec.rb
@@ -2,7 +2,10 @@
require 'fast_spec_helper'
+require 're2'
+
require_relative '../../app/services/service_response'
+require_relative '../../lib/gitlab/error_tracking'
RSpec.describe ServiceResponse do
describe '.success' do
@@ -94,4 +97,76 @@ RSpec.describe ServiceResponse do
expect(described_class.error(message: 'error message').errors).to eq(['error message'])
end
end
+
+ describe '#track_and_raise_exception' do
+ context 'when successful' do
+ let(:response) { described_class.success }
+
+ it 'returns self' do
+ expect(response.track_and_raise_exception).to be response
+ end
+ end
+
+ context 'when an error' do
+ let(:response) { described_class.error(message: 'bang') }
+
+ it 'tracks and raises' do
+ expect(::Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
+ .with(StandardError.new('bang'), {})
+
+ response.track_and_raise_exception
+ end
+
+ it 'allows specification of error class' do
+ error = Class.new(StandardError)
+ expect(::Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
+ .with(error.new('bang'), {})
+
+ response.track_and_raise_exception(as: error)
+ end
+
+ it 'allows extra data for tracking' do
+ expect(::Gitlab::ErrorTracking).to receive(:track_and_raise_exception)
+ .with(StandardError.new('bang'), { foo: 1, bar: 2 })
+
+ response.track_and_raise_exception(foo: 1, bar: 2)
+ end
+ end
+ end
+
+ describe '#track_exception' do
+ context 'when successful' do
+ let(:response) { described_class.success }
+
+ it 'returns self' do
+ expect(response.track_exception).to be response
+ end
+ end
+
+ context 'when an error' do
+ let(:response) { described_class.error(message: 'bang') }
+
+ it 'tracks' do
+ expect(::Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(StandardError.new('bang'), {})
+
+ expect(response.track_exception).to be response
+ end
+
+ it 'allows specification of error class' do
+ error = Class.new(StandardError)
+ expect(::Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(error.new('bang'), {})
+
+ expect(response.track_exception(as: error)).to be response
+ end
+
+ it 'allows extra data for tracking' do
+ expect(::Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(StandardError.new('bang'), { foo: 1, bar: 2 })
+
+ expect(response.track_exception(foo: 1, bar: 2)).to be response
+ end
+ end
+ end
end
diff --git a/spec/services/web_hooks/destroy_service_spec.rb b/spec/services/web_hooks/destroy_service_spec.rb
index 5269fe08ac0..4d9bb18e540 100644
--- a/spec/services/web_hooks/destroy_service_spec.rb
+++ b/spec/services/web_hooks/destroy_service_spec.rb
@@ -7,50 +7,46 @@ RSpec.describe WebHooks::DestroyService do
subject { described_class.new(user) }
- shared_examples 'batched destroys' do
- it 'destroys all hooks in batches' do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- expect(subject).to receive(:delete_web_hook_logs_in_batches).exactly(4).times.and_call_original
-
- expect do
- status = subject.execute(hook)
- expect(status[:async]).to be false
- end
- .to change { WebHook.count }.from(1).to(0)
- .and change { WebHookLog.count }.from(3).to(0)
- end
-
- it 'returns an error if sync destroy fails' do
- expect(hook).to receive(:destroy).and_return(false)
+ describe '#execute' do
+ %i[system_hook project_hook].each do |factory|
+ context "deleting a #{factory}" do
+ let!(:hook) { create(factory) } # rubocop: disable Rails/SaveBang (false-positive!)
+ let!(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
- result = subject.sync_destroy(hook)
-
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq("Unable to destroy #{hook.model_name.human}")
- end
+ it 'is successful' do
+ expect(subject.execute(hook)).to be_success
+ end
- it 'schedules an async delete' do
- stub_const('WebHooks::DestroyService::LOG_COUNT_THRESHOLD', 1)
+ it 'destroys the hook' do
+ expect { subject.execute(hook) }.to change(WebHook, :count).from(1).to(0)
+ end
- expect(WebHooks::DestroyWorker).to receive(:perform_async).with(user.id, hook.id).and_call_original
+ it 'does not destroy logs' do
+ expect { subject.execute(hook) }.not_to change(WebHookLog, :count)
+ end
- status = subject.execute(hook)
+ it 'schedules the destruction of logs' do
+ expect(WebHooks::LogDestroyWorker).to receive(:perform_async).with({ 'hook_id' => hook.id })
+ expect(Gitlab::AppLogger).to receive(:info).with(match(/scheduled a deletion of logs/))
- expect(status[:async]).to be true
- end
- end
+ subject.execute(hook)
+ end
- context 'with system hook' do
- let!(:hook) { create(:system_hook, url: "http://example.com") }
- let!(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
+ context 'when the hook fails to destroy' do
+ before do
+ allow(hook).to receive(:destroy).and_return(false)
+ end
- it_behaves_like 'batched destroys'
- end
+ it 'is not a success' do
+ expect(WebHooks::LogDestroyWorker).not_to receive(:perform_async)
- context 'with project hook' do
- let!(:hook) { create(:project_hook) }
- let!(:log) { create_list(:web_hook_log, 3, web_hook: hook) }
+ r = subject.execute(hook)
- it_behaves_like 'batched destroys'
+ expect(r).to be_error
+ expect(r[:message]).to match %r{Unable to destroy}
+ end
+ end
+ end
+ end
end
end
diff --git a/spec/services/web_hooks/log_destroy_service_spec.rb b/spec/services/web_hooks/log_destroy_service_spec.rb
new file mode 100644
index 00000000000..7634726e5a4
--- /dev/null
+++ b/spec/services/web_hooks/log_destroy_service_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebHooks::LogDestroyService do
+ subject(:service) { described_class.new(hook.id) }
+
+ describe '#execute' do
+ shared_examples 'deletes web hook logs for hook' do
+ before do
+ create_list(:web_hook_log, 3, web_hook: hook)
+ hook.destroy! # The LogDestroyService is expected to be called _after_ hook destruction
+ end
+
+ it 'deletes the logs' do
+ expect { service.execute }
+ .to change(WebHookLog, :count).from(3).to(0)
+ end
+
+ context 'when the data-set exceeds the batch size' do
+ before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+ end
+
+ it 'deletes the logs' do
+ expect { service.execute }
+ .to change(WebHookLog, :count).from(3).to(0)
+ end
+ end
+
+ context 'when it encounters an error' do
+ before do
+ allow(WebHookLog).to receive(:delete_batch_for).and_raise(StandardError.new('bang'))
+ end
+
+ it 'reports the error' do
+ expect(service.execute)
+ .to be_error
+ .and have_attributes(message: 'bang')
+ end
+ end
+ end
+
+ context 'with system hook' do
+ let!(:hook) { create(:system_hook, url: "http://example.com") }
+
+ it_behaves_like 'deletes web hook logs for hook'
+ end
+
+ context 'with project hook' do
+ let!(:hook) { create(:project_hook) }
+
+ it_behaves_like 'deletes web hook logs for hook'
+ end
+ end
+end
diff --git a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
index 0e7b4ea504c..da9c5833926 100644
--- a/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
+++ b/spec/workers/projects/inactive_projects_deletion_cron_worker_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
end
it 'does not invoke Projects::InactiveProjectsDeletionNotificationWorker' do
- expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_in)
+ expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async)
expect(::Projects::DestroyService).not_to receive(:new)
worker.perform
@@ -68,7 +68,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
end
it 'does not invoke Projects::InactiveProjectsDeletionNotificationWorker' do
- expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_in)
+ expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async)
expect(::Projects::DestroyService).not_to receive(:new)
worker.perform
@@ -82,8 +82,6 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
end
context 'when feature flag is enabled', :clean_gitlab_redis_shared_state, :sidekiq_inline do
- let_it_be(:delay) { anything }
-
before do
stub_feature_flags(inactive_projects_deletion: true)
end
@@ -93,8 +91,8 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
expect(redis).to receive(:hset).with('inactive_projects_deletion_warning_email_notified',
"project:#{inactive_large_project.id}", Date.current)
end
- expect(::Projects::InactiveProjectsDeletionNotificationWorker).to receive(:perform_in).with(
- delay, inactive_large_project.id, deletion_date).and_call_original
+ expect(::Projects::InactiveProjectsDeletionNotificationWorker).to receive(:perform_async).with(
+ inactive_large_project.id, deletion_date).and_call_original
expect(::Projects::DestroyService).not_to receive(:new)
worker.perform
@@ -106,7 +104,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
Date.current.to_s)
end
- expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_in)
+ expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async)
expect(::Projects::DestroyService).not_to receive(:new)
worker.perform
@@ -118,7 +116,7 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
15.months.ago.to_date.to_s)
end
- expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_in)
+ expect(::Projects::InactiveProjectsDeletionNotificationWorker).not_to receive(:perform_async)
expect(::Projects::DestroyService).to receive(:new).with(inactive_large_project, admin_user, {})
.at_least(:once).and_call_original
@@ -131,6 +129,34 @@ RSpec.describe Projects::InactiveProjectsDeletionCronWorker do
"project:#{inactive_large_project.id}")).to be_nil
end
end
+
+ context 'when the worker is running for more than 4 minutes' do
+ before do
+ subject.instance_variable_set(:@start_time, ::Gitlab::Metrics::System.monotonic_time - 5.minutes)
+ end
+
+ it 'stores the last processed inactive project_id in redis cache' do
+ Gitlab::Redis::Cache.with do |redis|
+ expect { worker.perform }
+ .to change { redis.get('last_processed_inactive_project_id') }.to(inactive_large_project.id.to_s)
+ end
+ end
+ end
+
+ context 'when the worker finishes processing in less than 4 minutes' do
+ before do
+ Gitlab::Redis::Cache.with do |redis|
+ redis.set('last_processed_inactive_project_id', inactive_large_project.id)
+ end
+ end
+
+ it 'clears the last processed inactive project_id from redis cache' do
+ Gitlab::Redis::Cache.with do |redis|
+ expect { worker.perform }
+ .to change { redis.get('last_processed_inactive_project_id') }.to(nil)
+ end
+ end
+ end
end
it_behaves_like 'an idempotent worker'
diff --git a/spec/workers/web_hooks/destroy_worker_spec.rb b/spec/workers/web_hooks/destroy_worker_spec.rb
index fd26c8591ee..8e75610a031 100644
--- a/spec/workers/web_hooks/destroy_worker_spec.rb
+++ b/spec/workers/web_hooks/destroy_worker_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe WebHooks::DestroyWorker do
+ include AfterNextHelpers
+
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
@@ -20,23 +22,26 @@ RSpec.describe WebHooks::DestroyWorker do
let!(:other_log) { create(:web_hook_log, web_hook: other_hook) }
it "deletes the Web hook and logs", :aggregate_failures do
+ expect(WebHooks::LogDestroyWorker).to receive(:perform_async)
+
expect { subject.perform(user.id, hook.id) }
- .to change { WebHookLog.count }.from(2).to(1)
- .and change { WebHook.count }.from(2).to(1)
+ .to change { WebHook.count }.from(2).to(1)
expect(WebHook.find(other_hook.id)).to be_present
expect(WebHookLog.find(other_log.id)).to be_present
end
it "raises and tracks an error if destroy failed" do
- allow_next_instance_of(::WebHooks::DestroyService) do |instance|
- expect(instance).to receive(:sync_destroy).with(anything).and_return({ status: :error, message: "failed" })
- end
+ expect_next(::WebHooks::DestroyService)
+ .to receive(:sync_destroy).with(anything)
+ .and_return(ServiceResponse.error(message: "failed"))
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_exception)
+ .with(an_instance_of(described_class::DestroyError), { web_hook_id: hook.id })
+ .and_call_original
- expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(an_instance_of(::WebHooks::DestroyService::DestroyError), web_hook_id: hook.id)
- .and_call_original
- expect { subject.perform(user.id, hook.id) }.to raise_error(::WebHooks::DestroyService::DestroyError)
+ expect { subject.perform(user.id, hook.id) }.to raise_error(described_class::DestroyError)
end
context 'with unknown hook' do
diff --git a/spec/workers/web_hooks/log_destroy_worker_spec.rb b/spec/workers/web_hooks/log_destroy_worker_spec.rb
new file mode 100644
index 00000000000..0c107c05360
--- /dev/null
+++ b/spec/workers/web_hooks/log_destroy_worker_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WebHooks::LogDestroyWorker do
+ include AfterNextHelpers
+
+ let_it_be(:project) { create(:project) }
+
+ subject { described_class.new }
+
+ describe "#perform" do
+ let!(:hook) { create(:project_hook, project: project) }
+ let!(:other_hook) { create(:project_hook, project: project) }
+ let!(:log) { create(:web_hook_log, web_hook: hook) }
+ let!(:other_log) { create(:web_hook_log, web_hook: other_hook) }
+
+ context 'with a Web hook' do
+ it "deletes the relevant logs", :aggregate_failures do
+ hook.destroy! # The worker does not depend on the presence of the hook
+
+ expect { subject.perform({ 'hook_id' => hook.id }) }
+ .to change { WebHookLog.count }.by(-1)
+
+ expect(WebHook.find(other_hook.id)).to be_present
+ expect(WebHookLog.find(other_log.id)).to be_present
+ end
+
+ it 'is idempotent' do
+ subject.perform({ 'hook_id' => hook.id })
+ subject.perform({ 'hook_id' => hook.id })
+
+ expect(hook.web_hook_logs).to be_none
+ end
+
+ it "raises and tracks an error if destroy failed" do
+ expect_next(::WebHooks::LogDestroyService)
+ .to receive(:execute).and_return(ServiceResponse.error(message: "failed"))
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_exception)
+ .with(an_instance_of(described_class::DestroyError), { web_hook_id: hook.id })
+ .and_call_original
+
+ expect { subject.perform({ 'hook_id' => hook.id }) }
+ .to raise_error(described_class::DestroyError)
+ end
+
+ context 'with extra arguments' do
+ it 'does not raise an error' do
+ expect { subject.perform({ 'hook_id' => hook.id, 'extra' => true }) }.not_to raise_error
+
+ expect(WebHook.count).to eq(2)
+ expect(WebHookLog.count).to eq(1)
+ end
+ end
+ end
+
+ context 'with no arguments' do
+ it 'does not raise an error' do
+ expect { subject.perform }.not_to raise_error
+
+ expect(WebHook.count).to eq(2)
+ expect(WebHookLog.count).to eq(2)
+ end
+ end
+
+ context 'with empty arguments' do
+ it 'does not raise an error' do
+ expect { subject.perform({}) }.not_to raise_error
+
+ expect(WebHook.count).to eq(2)
+ expect(WebHookLog.count).to eq(2)
+ end
+ end
+
+ context 'with unknown hook' do
+ it 'does not raise an error' do
+ expect { subject.perform({ 'hook_id' => non_existing_record_id }) }.not_to raise_error
+
+ expect(WebHook.count).to eq(2)
+ expect(WebHookLog.count).to eq(2)
+ end
+ end
+ end
+end
diff --git a/workhorse/internal/api/api.go b/workhorse/internal/api/api.go
index 8954923ad75..a00cff34b89 100644
--- a/workhorse/internal/api/api.go
+++ b/workhorse/internal/api/api.go
@@ -3,6 +3,7 @@ package api
import (
"bytes"
"encoding/json"
+ "errors"
"fmt"
"io"
"net/http"
@@ -263,26 +264,22 @@ func (api *API) newRequest(r *http.Request, suffix string) (*http.Request, error
// PreAuthorize performs a pre-authorization check against the API for the given HTTP request
//
-// If `outErr` is set, the other fields will be nil and it should be treated as
-// a 500 error.
+// If the returned *http.Response is not nil, the caller is responsible for closing its body
//
-// If httpResponse is present, the caller is responsible for closing its body
-//
-// authResponse will only be present if the authorization check was successful
-func (api *API) PreAuthorize(suffix string, r *http.Request) (httpResponse *http.Response, authResponse *Response, outErr error) {
+// Only upon successful authorization do we return a non-nil *Response
+func (api *API) PreAuthorize(suffix string, r *http.Request) (_ *http.Response, _ *Response, err error) {
authReq, err := api.newRequest(r, suffix)
if err != nil {
return nil, nil, fmt.Errorf("preAuthorizeHandler newUpstreamRequest: %v", err)
}
- httpResponse, err = api.doRequestWithoutRedirects(authReq)
+ httpResponse, err := api.doRequestWithoutRedirects(authReq)
if err != nil {
return nil, nil, fmt.Errorf("preAuthorizeHandler: do request: %v", err)
}
defer func() {
- if outErr != nil {
+ if err != nil {
httpResponse.Body.Close()
- httpResponse = nil
}
}()
requestsCounter.WithLabelValues(strconv.Itoa(httpResponse.StatusCode), authReq.Method).Inc()
@@ -293,17 +290,43 @@ func (api *API) PreAuthorize(suffix string, r *http.Request) (httpResponse *http
return httpResponse, nil, nil
}
- authResponse = &Response{}
+ authResponse := &Response{}
// The auth backend validated the client request and told us additional
// request metadata. We must extract this information from the auth
// response body.
if err := json.NewDecoder(httpResponse.Body).Decode(authResponse); err != nil {
- return httpResponse, nil, fmt.Errorf("preAuthorizeHandler: decode authorization response: %v", err)
+ return nil, nil, fmt.Errorf("preAuthorizeHandler: decode authorization response: %v", err)
}
return httpResponse, authResponse, nil
}
+// PreAuthorizeFixedPath makes an internal Workhorse API call to a fixed
+// path, using the HTTP headers of r.
+func (api *API) PreAuthorizeFixedPath(r *http.Request, method string, path string) (*Response, error) {
+ authReq, err := http.NewRequestWithContext(r.Context(), method, api.URL.String(), nil)
+ if err != nil {
+ return nil, fmt.Errorf("construct auth request: %w", err)
+ }
+ authReq.Header = helper.HeaderClone(r.Header)
+
+ ignoredResponse, apiResponse, err := api.PreAuthorize(path, authReq)
+ if err != nil {
+ return nil, fmt.Errorf("PreAuthorize: %w", err)
+ }
+
+ // We don't need the contents of ignoredResponse but we are responsible
+ // for closing it. Part of the reason PreAuthorizeFixedPath exists is to
+ // hide this awkwardness.
+ ignoredResponse.Body.Close()
+
+ if apiResponse == nil {
+ return nil, errors.New("no api response on fixed path")
+ }
+
+ return apiResponse, nil
+}
+
func (api *API) PreAuthorizeHandler(next HandleFunc, suffix string) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
httpResponse, authResponse, err := api.PreAuthorize(suffix, r)
diff --git a/workhorse/internal/upload/multipart_uploader.go b/workhorse/internal/upload/multipart_uploader.go
index c029b484955..2456a2c8626 100644
--- a/workhorse/internal/upload/multipart_uploader.go
+++ b/workhorse/internal/upload/multipart_uploader.go
@@ -24,10 +24,18 @@ func Multipart(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
// where we cannot pre-authorize both because we don't know which Rails
// endpoint to call, and because eagerly pre-authorizing would add too
// much overhead.
-func SkipRailsPreAuthMultipart(tempPath string, h http.Handler, p Preparer) http.Handler {
+func SkipRailsPreAuthMultipart(tempPath string, myAPI *api.API, h http.Handler, p Preparer) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
s := &SavedFileTracker{Request: r}
- fa := &eagerAuthorizer{&api.Response{TempPath: tempPath}}
+
+ // We use testAuthorizer as a temporary measure. When
+ // https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/742 is done, we
+ // should only be using apiAuthorizer.
+ fa := &testAuthorizer{
+ test: &apiAuthorizer{myAPI},
+ actual: &eagerAuthorizer{&api.Response{TempPath: tempPath}},
+ }
+
interceptMultipartFiles(w, r, h, s, fa, p)
})
}
diff --git a/workhorse/internal/upload/rewrite.go b/workhorse/internal/upload/rewrite.go
index 91b5f7c01d5..200b3efc554 100644
--- a/workhorse/internal/upload/rewrite.go
+++ b/workhorse/internal/upload/rewrite.go
@@ -16,10 +16,10 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
- "gitlab.com/gitlab-org/labkit/log"
-
"golang.org/x/image/tiff"
+ "gitlab.com/gitlab-org/gitlab/workhorse/internal/log"
+
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/lsif_transformer/parser"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
@@ -233,14 +233,14 @@ func handleExifUpload(ctx context.Context, r io.Reader, filename string, imageTy
log.WithContextFields(ctx, log.Fields{
"filename": filename,
"imageType": imageType,
- }).Print("invalid content type, not running exiftool")
+ }).Info("invalid content type, not running exiftool")
return tmpfile, nil
}
log.WithContextFields(ctx, log.Fields{
"filename": filename,
- }).Print("running exiftool to remove any metadata")
+ }).Info("running exiftool to remove any metadata")
cleaner, err := exif.NewCleaner(ctx, tmpfile)
if err != nil {
@@ -309,3 +309,35 @@ func (ea *eagerAuthorizer) AuthorizeFile(r *http.Request) (*api.Response, error)
}
var _ fileAuthorizer = &eagerAuthorizer{}
+
+type apiAuthorizer struct {
+ api *api.API
+}
+
+func (aa *apiAuthorizer) AuthorizeFile(r *http.Request) (*api.Response, error) {
+ return aa.api.PreAuthorizeFixedPath(
+ r,
+ "POST",
+ "/api/v4/internal/workhorse/authorize_upload",
+ )
+}
+
+var _ fileAuthorizer = &apiAuthorizer{}
+
+type testAuthorizer struct {
+ test fileAuthorizer
+ actual fileAuthorizer
+}
+
+func (ta *testAuthorizer) AuthorizeFile(r *http.Request) (*api.Response, error) {
+ logger := log.WithRequest(r)
+ if response, err := ta.test.AuthorizeFile(r); err != nil {
+ logger.WithError(err).Error("test api preauthorize request failed")
+ } else {
+ logger.WithFields(log.Fields{
+ "temp_path": response.TempPath,
+ }).Info("test api preauthorize request")
+ }
+
+ return ta.actual.AuthorizeFile(r)
+}
diff --git a/workhorse/internal/upstream/routes.go b/workhorse/internal/upstream/routes.go
index 1a514bbc225..95c9b99b833 100644
--- a/workhorse/internal/upstream/routes.go
+++ b/workhorse/internal/upstream/routes.go
@@ -223,7 +223,7 @@ func configureRoutes(u *upstream) {
mimeMultipartUploader := upload.Multipart(api, signingProxy, preparer)
uploadPath := path.Join(u.DocumentRoot, "uploads/tmp")
- tempfileMultipartProxy := upload.SkipRailsPreAuthMultipart(uploadPath, proxy, preparer)
+ tempfileMultipartProxy := upload.SkipRailsPreAuthMultipart(uploadPath, api, proxy, preparer)
ciAPIProxyQueue := queueing.QueueRequests("ci_api_job_requests", tempfileMultipartProxy, u.APILimit, u.APIQueueLimit, u.APIQueueTimeout)
ciAPILongPolling := builds.RegisterHandler(ciAPIProxyQueue, redis.WatchKey, u.APICILongPollingDuration)
diff --git a/workhorse/upload_test.go b/workhorse/upload_test.go
index 180598ab260..5a08e80798a 100644
--- a/workhorse/upload_test.go
+++ b/workhorse/upload_test.go
@@ -68,7 +68,7 @@ func expectSignedRequest(t *testing.T, r *http.Request) {
func uploadTestServer(t *testing.T, authorizeTests func(r *http.Request), extraTests func(r *http.Request)) *httptest.Server {
return testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
- if strings.HasSuffix(r.URL.Path, "/authorize") {
+ if strings.HasSuffix(r.URL.Path, "/authorize") || r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
expectSignedRequest(t, r)
w.Header().Set("Content-Type", api.ResponseContentType)
@@ -154,6 +154,10 @@ func TestAcceleratedUpload(t *testing.T) {
t.Run(tt.resource, func(t *testing.T) {
ts := uploadTestServer(t,
func(r *http.Request) {
+ if r.URL.Path == "/api/v4/internal/workhorse/authorize_upload" {
+ // Nothing to validate: this is a hard coded URL
+ return
+ }
resource := strings.TrimRight(tt.resource, "/")
// Validate %2F characters haven't been unescaped
require.Equal(t, resource+"/authorize", r.URL.String())
@@ -270,24 +274,23 @@ func TestUnacceleratedUploads(t *testing.T) {
func TestBlockingRewrittenFieldsHeader(t *testing.T) {
canary := "untrusted header passed by user"
+ multiPartBody, multiPartContentType, err := multipartBodyWithFile()
+ require.NoError(t, err)
+
testCases := []struct {
desc string
contentType string
body io.Reader
present bool
}{
- {"multipart with file", "", nil, true}, // placeholder
+ {"multipart with file", multiPartContentType, multiPartBody, true},
{"no multipart", "text/plain", nil, false},
}
- var err error
- testCases[0].body, testCases[0].contentType, err = multipartBodyWithFile()
- require.NoError(t, err)
-
for _, tc := range testCases {
ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
key := upload.RewrittenFieldsHeader
- if tc.present {
+ if tc.present && r.URL.Path != "/api/v4/internal/workhorse/authorize_upload" {
require.Contains(t, r.Header, key)
} else {
require.NotContains(t, r.Header, key)