gitlab.com/gitlab-org/gitlab-foss.git
-rw-r--r--  .gitlab/ci/database.gitlab-ci.yml | 2
-rw-r--r--  .gitlab/ci/qa-common/main.gitlab-ci.yml | 3
-rw-r--r--  .gitlab/ci/qa-common/variables.gitlab-ci.yml | 1
-rw-r--r--  .gitlab/ci/rules.gitlab-ci.yml | 21
-rw-r--r--  app/assets/javascripts/notes/stores/actions.js | 9
-rw-r--r--  app/assets/javascripts/snippets/components/show.vue | 9
-rw-r--r--  app/assets/stylesheets/framework/files.scss | 1
-rw-r--r--  app/assets/stylesheets/page_bundles/editor.scss | 9
-rw-r--r--  app/graphql/queries/snippet/snippet.query.graphql | 1
-rw-r--r--  app/graphql/resolvers/noteable/notes_resolver.rb | 21
-rw-r--r--  app/graphql/types/alert_management/alert_type.rb | 4
-rw-r--r--  app/graphql/types/ci/catalog/resource_type.rb | 7
-rw-r--r--  app/graphql/types/notes/note_type.rb | 4
-rw-r--r--  app/graphql/types/notes/noteable_interface.rb | 2
-rw-r--r--  app/models/application_setting.rb | 1
-rw-r--r--  app/models/plan_limits.rb | 7
-rw-r--r--  app/models/uploads/fog.rb | 43
-rw-r--r--  app/services/merge_requests/after_create_service.rb | 15
-rw-r--r--  app/services/object_storage/delete_stale_direct_uploads_service.rb | 35
-rw-r--r--  app/workers/all_queues.yml | 9
-rw-r--r--  app/workers/object_storage/delete_stale_direct_uploads_worker.rb | 29
-rw-r--r--  config/events/1671198983_Gitlab__UsageDataCounters__MergeRequestActivityUniqueCounter_create.yml | 10
-rw-r--r--  config/feature_flags/development/stale_pending_direct_uploads_cleaner.yml | 8
-rw-r--r--  config/initializers/1_settings.rb | 3
-rw-r--r--  db/migrate/20230523125430_add_dismissal_reason_to_vulnerability_read.rb | 13
-rw-r--r--  db/post_migrate/20230523131914_recreate_index_on_vulnerability_reads.rb | 34
-rw-r--r--  db/post_migrate/20230523132647_recreate_index_on_vulnerability_reads2.rb | 34
-rw-r--r--  db/schema_migrations/20230523125430 | 1
-rw-r--r--  db/schema_migrations/20230523131914 | 1
-rw-r--r--  db/schema_migrations/20230523132647 | 1
-rw-r--r--  db/structure.sql | 5
-rw-r--r--  doc/administration/auth/ldap/index.md | 2
-rw-r--r--  doc/administration/docs_self_host.md | 2
-rw-r--r--  doc/administration/geo/setup/index.md | 2
-rw-r--r--  doc/administration/monitoring/prometheus/gitlab_metrics.md | 1
-rw-r--r--  doc/api/graphql/reference/index.md | 2
-rw-r--r--  doc/api/namespaces.md | 17
-rw-r--r--  doc/architecture/blueprints/cells/index.md | 4
-rw-r--r--  doc/architecture/blueprints/organization/index.md | 10
-rw-r--r--  doc/ci/runners/runners_scope.md | 2
-rw-r--r--  doc/ci/runners/saas/linux_saas_runner.md | 3
-rw-r--r--  doc/development/ai_architecture.md | 2
-rw-r--r--  doc/gitlab-basics/start-using-git.md | 2
-rw-r--r--  doc/raketasks/backup_gitlab.md | 2
-rw-r--r--  doc/topics/gitlab_flow.md | 4
-rw-r--r--  doc/tutorials/configure_gitlab_runner_to_use_gke/index.md | 201
-rw-r--r--  doc/update/zero_downtime.md | 2
-rw-r--r--  doc/user/admin_area/settings/usage_statistics.md | 1
-rw-r--r--  doc/user/group/compliance_frameworks.md | 2
-rw-r--r--  doc/user/group/manage.md | 2
-rw-r--r--  doc/user/project/repository/code_suggestions.md | 2
-rw-r--r--  lib/api/entities/namespace.rb | 8
-rw-r--r--  lib/atlassian/jira_issue_key_extractor.rb | 4
-rw-r--r--  lib/backup/manager.rb | 5
-rw-r--r--  lib/gitlab/bitbucket_import/importer.rb | 22
-rw-r--r--  lib/gitlab/internal_events.rb | 44
-rw-r--r--  lib/gitlab/sidekiq_logging/structured_logger.rb | 20
-rw-r--r--  lib/gitlab/sidekiq_middleware/defer_jobs.rb | 13
-rw-r--r--  lib/gitlab/tracking.rb | 16
-rw-r--r--  lib/gitlab/usage_data_counters/merge_request_activity_unique_counter.rb | 17
-rw-r--r--  lib/object_storage/fog_helpers.rb | 51
-rw-r--r--  lib/object_storage/pending_direct_upload.rb | 88
-rw-r--r--  lib/sidebars/admin/menus/monitoring_menu.rb | 13
-rw-r--r--  lib/tasks/gitlab/backup.rake | 10
-rw-r--r--  locale/gitlab.pot | 3
-rw-r--r--  qa/Gemfile | 2
-rw-r--r--  qa/Gemfile.lock | 4
-rw-r--r--  spec/frontend/notes/stores/actions_spec.js | 25
-rw-r--r--  spec/frontend/snippets/components/show_spec.js | 40
-rw-r--r--  spec/frontend/snippets/test_utils.js | 1
-rw-r--r--  spec/graphql/types/ci/catalog/resource_type_spec.rb | 1
-rw-r--r--  spec/graphql/types/notes/note_type_spec.rb | 1
-rw-r--r--  spec/lib/atlassian/jira_issue_key_extractor_spec.rb | 8
-rw-r--r--  spec/lib/gitlab/bitbucket_import/importer_spec.rb | 43
-rw-r--r--  spec/lib/gitlab/internal_events_spec.rb | 65
-rw-r--r--  spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb | 15
-rw-r--r--  spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb | 13
-rw-r--r--  spec/lib/gitlab/tracking_spec.rb | 16
-rw-r--r--  spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb | 43
-rw-r--r--  spec/lib/object_storage/fog_helpers_spec.rb | 49
-rw-r--r--  spec/lib/object_storage/pending_direct_upload_spec.rb | 111
-rw-r--r--  spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb | 28
-rw-r--r--  spec/models/plan_limits_spec.rb | 6
-rw-r--r--  spec/requests/api/graphql/project/work_items_spec.rb | 76
-rw-r--r--  spec/requests/api/graphql/work_item_spec.rb | 89
-rw-r--r--  spec/requests/api/namespaces_spec.rb | 4
-rw-r--r--  spec/services/merge_requests/after_create_service_spec.rb | 25
-rw-r--r--  spec/services/object_storage/delete_stale_direct_uploads_service_spec.rb | 108
-rw-r--r--  spec/spec_helper.rb | 1
-rw-r--r--  spec/support/helpers/pending_direct_uploads_helpers.rb | 34
-rw-r--r--  spec/support/helpers/test_env.rb | 5
-rw-r--r--  spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb | 7
-rw-r--r--  spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb | 24
-rw-r--r--  spec/tasks/gitlab/backup_rake_spec.rb | 17
-rw-r--r--  spec/workers/object_storage/delete_stale_direct_uploads_worker_spec.rb | 72
95 files changed, 1591 insertions(+), 264 deletions(-)
diff --git a/.gitlab/ci/database.gitlab-ci.yml b/.gitlab/ci/database.gitlab-ci.yml
index 941cb9224fb..45aa6a35d6c 100644
--- a/.gitlab/ci/database.gitlab-ci.yml
+++ b/.gitlab/ci/database.gitlab-ci.yml
@@ -127,7 +127,7 @@ db:backup_and_restore:
script:
- . scripts/prepare_build.sh
- bundle exec rake db:drop db:create db:schema:load db:seed_fu
- - mkdir -p tmp/tests/public/uploads tmp/tests/{artifacts,pages,lfs-objects,terraform_state,registry,packages}
+ - mkdir -p tmp/tests/public/uploads tmp/tests/{artifacts,pages,lfs-objects,terraform_state,registry,packages,ci_secure_files}
- bundle exec rake gitlab:backup:create
- date
- bundle exec rake gitlab:backup:restore
diff --git a/.gitlab/ci/qa-common/main.gitlab-ci.yml b/.gitlab/ci/qa-common/main.gitlab-ci.yml
index c9b60b078bb..51f19a8ee0d 100644
--- a/.gitlab/ci/qa-common/main.gitlab-ci.yml
+++ b/.gitlab/ci/qa-common/main.gitlab-ci.yml
@@ -6,7 +6,7 @@ workflow:
include:
- project: gitlab-org/quality/pipeline-common
- ref: 5.4.0
+ ref: 6.3.0
file:
- /ci/base.gitlab-ci.yml
- /ci/allure-report.yml
@@ -219,6 +219,7 @@ stages:
echo "Test suite passed. Exiting..."
exit 0
fi
+ - bundle exec update-screenshot-paths --input-files "${CI_PROJECT_DIR}/gitlab-qa-run-*/**/rspec-*.xml"
- |
bundle exec relate-failure-issue \
--input-files "${QA_RSPEC_JSON_FILE_PATTERN}" \
diff --git a/.gitlab/ci/qa-common/variables.gitlab-ci.yml b/.gitlab/ci/qa-common/variables.gitlab-ci.yml
index 0322247a89d..817f2e0020a 100644
--- a/.gitlab/ci/qa-common/variables.gitlab-ci.yml
+++ b/.gitlab/ci/qa-common/variables.gitlab-ci.yml
@@ -3,7 +3,6 @@
variables:
REGISTRY_HOST: "registry.gitlab.com"
REGISTRY_GROUP: "gitlab-org"
- SKIP_REPORT_IN_ISSUES: "true"
SKIP_OMNIBUS_TRIGGER: "true"
OMNIBUS_GITLAB_CACHE_UPDATE: "false"
OMNIBUS_GITLAB_RUBY3_BUILD: "false"
diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml
index 570171a77d2..9ab2e1591d5 100644
--- a/.gitlab/ci/rules.gitlab-ci.yml
+++ b/.gitlab/ci/rules.gitlab-ci.yml
@@ -540,9 +540,6 @@
- "config.ru"
- "{,ee/,jh/}{app,bin,config,db,generator_templates,haml_lint,lib,locale,public,scripts,storybook,symbol,vendor}/**/*"
- "doc/api/graphql/reference/*" # Files in this folder are auto-generated
- # Mapped patterns (see tests.yml)
- - "data/whats_new/*.yml"
- - "doc/index.md"
# CI changes
- ".gitlab-ci.yml"
- ".gitlab/ci/**/*"
@@ -556,6 +553,9 @@
# QA changes
- ".dockerignore"
- "{,jh/}qa/**/*"
+ # Mapped patterns (see tests.yml)
+ - "data/whats_new/*.yml"
+ - "doc/index.md"
# Workhorse changes
- "GITLAB_WORKHORSE_VERSION"
- "workhorse/**/*"
@@ -833,6 +833,7 @@
- <<: *if-merge-request-labels-pipeline-expedite
when: never
- <<: *if-merge-request-targeting-stable-branch
+ changes: *setup-test-env-patterns
- <<: *if-merge-request-labels-run-review-app
- <<: *if-merge-request
changes: *ci-build-images-patterns
@@ -910,6 +911,7 @@
allow_failure: true
# The rest are included to be consistent with .qa:rules:e2e:test-on-gdk
- <<: *if-merge-request-targeting-stable-branch
+ changes: *setup-test-env-patterns
allow_failure: true
- <<: *if-ruby2-branch
allow_failure: true
@@ -945,6 +947,7 @@
- <<: *if-merge-request-labels-pipeline-expedite
when: never
- <<: *if-merge-request-targeting-stable-branch
+ changes: *setup-test-env-patterns
- <<: *if-ruby2-branch
- <<: *if-merge-request-labels-run-review-app
- <<: *if-auto-deploy-branches
@@ -1139,6 +1142,7 @@
- <<: *if-merge-request-labels-pipeline-expedite
when: never
- <<: *if-merge-request-targeting-stable-branch
+ changes: *setup-test-env-patterns
- <<: *if-merge-request-labels-run-review-app
- <<: *if-merge-request-labels-run-all-e2e
- <<: *if-auto-deploy-branches
@@ -1364,6 +1368,7 @@
- <<: *if-not-canonical-namespace
when: never
- <<: *if-merge-request-targeting-stable-branch
+ changes: *setup-test-env-patterns
when: always
###############
@@ -1414,6 +1419,7 @@
- <<: *if-merge-request-labels-pipeline-expedite
when: never
- <<: *if-merge-request-targeting-stable-branch
+ changes: *setup-test-env-patterns
allow_failure: true
- <<: *if-merge-request
changes: *code-backstage-qa-patterns
@@ -1455,6 +1461,7 @@
rules:
- !reference [".qa:rules:package-and-test-never-run", rules]
- <<: *if-merge-request-targeting-stable-branch
+ changes: *setup-test-env-patterns
allow_failure: true
- <<: *if-ruby2-branch
allow_failure: true
@@ -1503,7 +1510,7 @@
- <<: *if-dot-com-gitlab-org-schedule
allow_failure: true
variables:
- SKIP_REPORT_IN_ISSUES: "false"
+ CREATE_TEST_FAILURE_ISSUES: "true"
PROCESS_TEST_RESULTS: "true"
KNAPSACK_GENERATE_REPORT: "true"
UPDATE_QA_CACHE: "true"
@@ -1532,7 +1539,7 @@
- <<: *if-default-branch-schedule-nightly
allow_failure: true
variables:
- SKIP_REPORT_IN_ISSUES: "false"
+ CREATE_TEST_FAILURE_ISSUES: "true"
PROCESS_TEST_RESULTS: "true"
QA_SAVE_TEST_METRICS: "true"
QA_EXPORT_TEST_METRICS: "false"
@@ -1560,7 +1567,7 @@
- <<: *if-default-branch-schedule-nightly
allow_failure: true
variables:
- SKIP_REPORT_IN_ISSUES: "false"
+ CREATE_TEST_FAILURE_ISSUES: "true"
PROCESS_TEST_RESULTS: "true"
QA_SAVE_TEST_METRICS: "true"
QA_EXPORT_TEST_METRICS: "false"
@@ -1572,7 +1579,7 @@
variables:
KNAPSACK_GENERATE_REPORT: "true"
PROCESS_TEST_RESULTS: "true"
- SKIP_REPORT_IN_ISSUES: "false"
+ CREATE_TEST_FAILURE_ISSUES: "true"
QA_SAVE_TEST_METRICS: "true"
QA_EXPORT_TEST_METRICS: "false"
diff --git a/app/assets/javascripts/notes/stores/actions.js b/app/assets/javascripts/notes/stores/actions.js
index dc7f1577bbb..f7f211c65c2 100644
--- a/app/assets/javascripts/notes/stores/actions.js
+++ b/app/assets/javascripts/notes/stores/actions.js
@@ -95,12 +95,19 @@ export const fetchDiscussions = (
{ commit, dispatch, getters },
{ path, filter, persistFilter },
) => {
- const config =
+ let config =
filter !== undefined
? { params: { notes_filter: filter, persist_filter: persistFilter } }
: null;
if (
+ window.gon?.features?.mrActivityFilters &&
+ getters.noteableType === constants.MERGE_REQUEST_NOTEABLE_TYPE
+ ) {
+ config = { params: { notes_filter: 0, persist_filter: false } };
+ }
+
+ if (
getters.noteableType === constants.ISSUE_NOTEABLE_TYPE ||
getters.noteableType === constants.MERGE_REQUEST_NOTEABLE_TYPE
) {
diff --git a/app/assets/javascripts/snippets/components/show.vue b/app/assets/javascripts/snippets/components/show.vue
index 853293e5eb6..083474da23e 100644
--- a/app/assets/javascripts/snippets/components/show.vue
+++ b/app/assets/javascripts/snippets/components/show.vue
@@ -31,7 +31,14 @@ export default {
mixins: [getSnippetMixin],
computed: {
embeddable() {
- return this.snippet.visibilityLevel === VISIBILITY_LEVEL_PUBLIC_STRING;
+ return (
+ this.snippet.visibilityLevel === VISIBILITY_LEVEL_PUBLIC_STRING && !this.isInPrivateProject
+ );
+ },
+ isInPrivateProject() {
+ const projectVisibility = this.snippet?.project?.visibility;
+ const isLimitedVisibilityProject = projectVisibility !== VISIBILITY_LEVEL_PUBLIC_STRING;
+ return projectVisibility ? isLimitedVisibilityProject : false;
},
canBeCloned() {
return Boolean(this.snippet.sshUrlToRepo || this.snippet.httpUrlToRepo);
diff --git a/app/assets/stylesheets/framework/files.scss b/app/assets/stylesheets/framework/files.scss
index 374db25065e..0b5d20c7398 100644
--- a/app/assets/stylesheets/framework/files.scss
+++ b/app/assets/stylesheets/framework/files.scss
@@ -246,7 +246,6 @@ span.idiff {
justify-content: space-between;
background-color: $gray-light;
border-bottom: 1px solid $border-color;
- border-top: 1px solid $border-color;
padding: $gl-padding-8 $gl-padding;
margin: 0;
border-radius: $border-radius-default $border-radius-default 0 0;
diff --git a/app/assets/stylesheets/page_bundles/editor.scss b/app/assets/stylesheets/page_bundles/editor.scss
index 0c1979424b1..a1165279bc4 100644
--- a/app/assets/stylesheets/page_bundles/editor.scss
+++ b/app/assets/stylesheets/page_bundles/editor.scss
@@ -1,15 +1,6 @@
@import 'page_bundles/mixins_and_variables_and_functions';
.file-editor {
- .nav-links {
- border-top: 1px solid var(--border-color, $border-color);
- border-right: 1px solid var(--border-color, $border-color);
- border-left: 1px solid var(--border-color, $border-color);
- border-bottom: 0;
- border-radius: $border-radius-small $border-radius-small 0 0;
- background: var(--gray-50, $gray-50);
- }
-
#editor,
.editor {
@include gl-border-0;
diff --git a/app/graphql/queries/snippet/snippet.query.graphql b/app/graphql/queries/snippet/snippet.query.graphql
index 5c0c7ebaa1b..8712a6f4b01 100644
--- a/app/graphql/queries/snippet/snippet.query.graphql
+++ b/app/graphql/queries/snippet/snippet.query.graphql
@@ -53,6 +53,7 @@ query GetSnippetQuery($ids: [SnippetID!]) {
id
fullPath
webUrl
+ visibility
}
author {
__typename
diff --git a/app/graphql/resolvers/noteable/notes_resolver.rb b/app/graphql/resolvers/noteable/notes_resolver.rb
new file mode 100644
index 00000000000..35c7838cac0
--- /dev/null
+++ b/app/graphql/resolvers/noteable/notes_resolver.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Resolvers
+ module Noteable
+ class NotesResolver < BaseResolver
+ include LooksAhead
+
+ type Types::Notes::NoteType.connection_type, null: false
+
+ def resolve_with_lookahead(*)
+ apply_lookahead(object.notes.fresh)
+ end
+
+ def preloads
+ {
+ award_emoji: [:award_emoji]
+ }
+ end
+ end
+ end
+end
diff --git a/app/graphql/types/alert_management/alert_type.rb b/app/graphql/types/alert_management/alert_type.rb
index 5784c7a4872..36dd930c3d9 100644
--- a/app/graphql/types/alert_management/alert_type.rb
+++ b/app/graphql/types/alert_management/alert_type.rb
@@ -144,10 +144,6 @@ module Types
null: false,
description: 'URL of the alert.'
- def notes
- object.ordered_notes
- end
-
def metrics_dashboard_url
return if Feature.enabled?(:remove_monitor_metrics)
diff --git a/app/graphql/types/ci/catalog/resource_type.rb b/app/graphql/types/ci/catalog/resource_type.rb
index e4566aac9aa..7b3f746b666 100644
--- a/app/graphql/types/ci/catalog/resource_type.rb
+++ b/app/graphql/types/ci/catalog/resource_type.rb
@@ -21,10 +21,17 @@ module Types
field :icon, GraphQL::Types::String, null: true, description: 'Icon for the catalog resource.',
method: :avatar_path, alpha: { milestone: '15.11' }
+ field :web_path, GraphQL::Types::String, null: true, description: 'Web path of the catalog resource.',
+ alpha: { milestone: '16.1' }
+
field :versions, Types::ReleaseType.connection_type, null: true,
description: 'Versions of the catalog resource.',
resolver: Resolvers::ReleasesResolver,
alpha: { milestone: '16.1' }
+
+ def web_path
+ ::Gitlab::Routing.url_helpers.project_path(object.project)
+ end
end
# rubocop: enable Graphql/AuthorizeTypes
end
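
The new `webPath` field simply delegates to the Rails route helper for the resource's project. A minimal console sketch of what it resolves to (the project path is hypothetical):

```ruby
# Sketch only: webPath returns the relative project path for the catalog resource.
project = Project.find_by_full_path("my-group/my-component") # hypothetical project
Gitlab::Routing.url_helpers.project_path(project)
# => "/my-group/my-component"
```
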
diff --git a/app/graphql/types/notes/note_type.rb b/app/graphql/types/notes/note_type.rb
index 5055facb21b..e36f4d19be5 100644
--- a/app/graphql/types/notes/note_type.rb
+++ b/app/graphql/types/notes/note_type.rb
@@ -36,6 +36,10 @@ module Types
method: :note,
description: 'Content of the note.'
+ field :award_emoji, Types::AwardEmojis::AwardEmojiType.connection_type,
+ null: true,
+ description: 'List of award emojis associated with the note.'
+
field :confidential, GraphQL::Types::Boolean,
null: true,
description: 'Indicates if this note is confidential.',
diff --git a/app/graphql/types/notes/noteable_interface.rb b/app/graphql/types/notes/noteable_interface.rb
index 537084dff62..9971511d6ce 100644
--- a/app/graphql/types/notes/noteable_interface.rb
+++ b/app/graphql/types/notes/noteable_interface.rb
@@ -5,7 +5,7 @@ module Types
module NoteableInterface
include Types::BaseInterface
- field :notes, Types::Notes::NoteType.connection_type, null: false, description: "All notes on this noteable."
+ field :notes, resolver: Resolvers::Noteable::NotesResolver, null: false, description: "All notes on this noteable."
field :discussions, Types::Notes::DiscussionType.connection_type, null: false, description: "All discussions on this noteable."
field :commenters, Types::UserType.connection_type, null: false, description: "All commenters on this noteable."
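
Taken together, the `notes` field now resolves through `Resolvers::Noteable::NotesResolver`, and each note exposes an `awardEmoji` connection, so a client can fetch notes with their emoji reactions in one request while the resolver preloads `award_emoji` to avoid N+1 queries. A minimal sketch of such a query from a Rails console (project path, issue IID, and user are hypothetical):

```ruby
# Sketch: exercise the new notes resolver and the awardEmoji connection.
query = <<~GRAPHQL
  query {
    project(fullPath: "my-group/my-project") {
      issue(iid: "1") {
        notes {
          nodes {
            body
            awardEmoji { nodes { name } }
          }
        }
      }
    }
  }
GRAPHQL

result = GitlabSchema.execute(query, context: { current_user: User.first })
pp result.to_h.dig("data", "project", "issue", "notes", "nodes")
```
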
diff --git a/app/models/application_setting.rb b/app/models/application_setting.rb
index e8d638ed342..3be73c01b5a 100644
--- a/app/models/application_setting.rb
+++ b/app/models/application_setting.rb
@@ -13,7 +13,6 @@ class ApplicationSetting < MainClusterwide::ApplicationRecord
ignore_column :user_email_lookup_limit, remove_with: '15.0', remove_after: '2022-04-18'
ignore_column :send_user_confirmation_email, remove_with: '15.8', remove_after: '2022-12-18'
ignore_column :web_ide_clientside_preview_enabled, remove_with: '15.11', remove_after: '2023-04-22'
- ignore_column :clickhouse_connection_string, remove_with: '16.1', remove_after: '2023-05-22'
ignore_columns %i[instance_administration_project_id instance_administrators_group_id], remove_with: '16.2', remove_after: '2023-06-22'
INSTANCE_REVIEW_MIN_USERS = 50
diff --git a/app/models/plan_limits.rb b/app/models/plan_limits.rb
index 16a2d6df6aa..7544e0d1411 100644
--- a/app/models/plan_limits.rb
+++ b/app/models/plan_limits.rb
@@ -40,4 +40,11 @@ class PlanLimits < ApplicationRecord
limits = [limit, alternate_limit]
limits.map(&:to_i).select(&:positive?).min
end
+
+ # Overridden in EE
+ def dashboard_storage_limit_enabled?
+ false
+ end
end
+
+PlanLimits.prepend_mod_with('PlanLimits')
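
The `prepend_mod_with` call is the standard CE/EE extension point: EE ships a module that gets prepended to the class so it can override the CE default. An illustrative sketch of the pattern (the module below is not the real EE code, which by convention would live under `ee/app/models/ee/plan_limits.rb`):

```ruby
# Illustrative only: how an EE override of the CE default could look.
module EE
  module PlanLimits
    def dashboard_storage_limit_enabled?
      true # EE flips the CE default defined above
    end
  end
end

# In an EE install, prepend_mod_with('PlanLimits') effectively does:
PlanLimits.prepend(EE::PlanLimits)

PlanLimits.new.dashboard_storage_limit_enabled? # => true
```
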
diff --git a/app/models/uploads/fog.rb b/app/models/uploads/fog.rb
index d2b8eab9f0d..3c31909fb07 100644
--- a/app/models/uploads/fog.rb
+++ b/app/models/uploads/fog.rb
@@ -2,11 +2,8 @@
module Uploads
class Fog < Base
- include ::Gitlab::Utils::StrongMemoize
-
- def available?
- object_store.enabled
- end
+ include ::ObjectStorage::FogHelpers
+ extend ::Gitlab::Utils::Override
def keys(relation)
return [] unless available?
@@ -20,39 +17,9 @@ module Uploads
private
- def delete_object(key)
- return unless available?
-
- connection.delete_object(bucket_name, object_key(key))
-
- # So far, only GoogleCloudStorage raises an exception when the file is not found.
- # Other providers support idempotent requests and does not raise an error
- # when the file is missing.
- rescue ::Google::Apis::ClientError => e
- Gitlab::ErrorTracking.log_exception(e)
- end
-
- def object_store
- Gitlab.config.uploads.object_store
- end
-
- def bucket_name
- object_store.remote_directory
- end
-
- def object_key(key)
- # We allow administrators to create "sub buckets" by setting a prefix.
- # This makes it possible to deploy GitLab with only one object storage
- # bucket. This mirrors the implementation in app/uploaders/object_storage.rb.
- File.join([object_store.bucket_prefix, key].compact)
- end
-
- def connection
- return unless available?
-
- strong_memoize(:connection) do
- ::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys)
- end
+ override :storage_location_identifier
+ def storage_location_identifier
+ :uploads
end
end
end
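
The shared `ObjectStorage::FogHelpers` concern that replaces the removed methods is not shown in this excerpt (see `lib/object_storage/fog_helpers.rb` in the file list). A rough sketch of the shape implied by the code deleted above, with each includer supplying `storage_location_identifier`:

```ruby
# Rough sketch reconstructed from the methods removed from Uploads::Fog above;
# the real implementation lives in lib/object_storage/fog_helpers.rb.
module ObjectStorage
  module FogHelpers
    include ::Gitlab::Utils::StrongMemoize

    def available?
      object_store.enabled
    end

    private

    def storage_location_identifier
      raise NotImplementedError, 'Includers must declare a storage location, e.g. :uploads'
    end

    def object_store
      # Looks up the object storage settings for the includer's location.
      Gitlab.config[storage_location_identifier].object_store
    end

    def connection
      return unless available?

      strong_memoize(:connection) do
        ::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys)
      end
    end
  end
end
```
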
diff --git a/app/services/merge_requests/after_create_service.rb b/app/services/merge_requests/after_create_service.rb
index f174778e12e..5c1ec5add73 100644
--- a/app/services/merge_requests/after_create_service.rb
+++ b/app/services/merge_requests/after_create_service.rb
@@ -7,7 +7,9 @@ module MergeRequests
def execute(merge_request)
merge_request.ensure_merge_request_diff
+ logger.info(**log_payload(merge_request, 'Executing hooks'))
execute_hooks(merge_request)
+ logger.info(**log_payload(merge_request, 'Executed hooks'))
prepare_for_mergeability(merge_request)
prepare_merge_request(merge_request)
@@ -17,7 +19,9 @@ module MergeRequests
private
def prepare_for_mergeability(merge_request)
+ logger.info(**log_payload(merge_request, 'Creating pipeline'))
create_pipeline_for(merge_request, current_user)
+ logger.info(**log_payload(merge_request, 'Pipeline created'))
merge_request.update_head_pipeline
check_mergeability(merge_request)
end
@@ -58,6 +62,17 @@ module MergeRequests
def mark_merge_request_as_prepared(merge_request)
merge_request.update!(prepared_at: Time.current)
end
+
+ def logger
+ @logger ||= Gitlab::AppLogger
+ end
+
+ def log_payload(merge_request, message)
+ Gitlab::ApplicationContext.current.merge(
+ merge_request_id: merge_request.id,
+ message: message
+ )
+ end
end
end
diff --git a/app/services/object_storage/delete_stale_direct_uploads_service.rb b/app/services/object_storage/delete_stale_direct_uploads_service.rb
new file mode 100644
index 00000000000..e9560753fc4
--- /dev/null
+++ b/app/services/object_storage/delete_stale_direct_uploads_service.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module ObjectStorage
+ class DeleteStaleDirectUploadsService < BaseService
+ MAX_EXEC_DURATION = 250.seconds.freeze
+
+ def initialize; end
+
+ def execute
+ total_pending_entries = ObjectStorage::PendingDirectUpload.count
+ total_deleted_stale_entries = 0
+
+ timeout = false
+ start = Time.current
+
+ ObjectStorage::PendingDirectUpload.each do |pending_upload|
+ if pending_upload.stale?
+ pending_upload.delete
+ total_deleted_stale_entries += 1
+ end
+
+ if (Time.current - start) > MAX_EXEC_DURATION
+ timeout = true
+ break
+ end
+ end
+
+ success(
+ total_pending_entries: total_pending_entries,
+ total_deleted_stale_entries: total_deleted_stale_entries,
+ execution_timeout: timeout
+ )
+ end
+ end
+end
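
A quick usage sketch of the service (for example, from a Rails console); the return keys come straight from the `success` payload above, plus the `:status` key the inherited `success` helper adds:

```ruby
# Sketch: run the stale pending direct upload cleanup directly.
result = ObjectStorage::DeleteStaleDirectUploadsService.new.execute

result[:status]                      # => :success
result[:total_pending_entries]       # pending direct uploads that were inspected
result[:total_deleted_stale_entries] # stale entries that were removed
result[:execution_timeout]           # true if the 250-second budget ran out first
```
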
diff --git a/app/workers/all_queues.yml b/app/workers/all_queues.yml
index e6ad18181cd..c939da9325c 100644
--- a/app/workers/all_queues.yml
+++ b/app/workers/all_queues.yml
@@ -597,6 +597,15 @@
:weight: 1
:idempotent: false
:tags: []
+- :name: cronjob:object_storage_delete_stale_direct_uploads
+ :worker_name: ObjectStorage::DeleteStaleDirectUploadsWorker
+ :feature_category: :build_artifacts
+ :has_external_dependencies: false
+ :urgency: :low
+ :resource_boundary: :unknown
+ :weight: 1
+ :idempotent: true
+ :tags: []
- :name: cronjob:packages_cleanup_delete_orphaned_dependencies
:worker_name: Packages::Cleanup::DeleteOrphanedDependenciesWorker
:feature_category: :package_registry
diff --git a/app/workers/object_storage/delete_stale_direct_uploads_worker.rb b/app/workers/object_storage/delete_stale_direct_uploads_worker.rb
new file mode 100644
index 00000000000..216df498cc2
--- /dev/null
+++ b/app/workers/object_storage/delete_stale_direct_uploads_worker.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module ObjectStorage
+ class DeleteStaleDirectUploadsWorker
+ include ApplicationWorker
+
+ data_consistency :sticky
+ # rubocop:disable Scalability/CronWorkerContext
+ # This worker does not perform work scoped to a context
+ include CronjobQueue
+ # rubocop:enable Scalability/CronWorkerContext
+
+ # TODO: Determine proper feature category for this, as object storage is a shared feature.
+ # For now, only build artifacts use this worker.
+ feature_category :build_artifacts
+ idempotent!
+ deduplicate :until_executed
+
+ def perform
+ return unless Feature.enabled?(:stale_pending_direct_uploads_cleaner)
+
+ result = ObjectStorage::DeleteStaleDirectUploadsService.new.execute
+
+ log_extra_metadata_on_done(:total_pending_entries, result[:total_pending_entries])
+    log_extra_metadata_on_done(:total_deleted_stale_entries, result[:total_deleted_stale_entries])
+ log_extra_metadata_on_done(:execution_timeout, result[:execution_timeout])
+ end
+ end
+end
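
Because the worker is gated behind a feature flag that defaults to off, here is a hedged sketch of how it could be exercised manually from a Rails console while testing (in production it runs on the `*/6 * * * *` cron schedule added in `config/initializers/1_settings.rb`):

```ruby
# Sketch: enable the (default-off) flag and run the cleanup inline.
Feature.enable(:stale_pending_direct_uploads_cleaner)

ObjectStorage::DeleteStaleDirectUploadsWorker.new.perform
```
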
diff --git a/config/events/1671198983_Gitlab__UsageDataCounters__MergeRequestActivityUniqueCounter_create.yml b/config/events/1671198983_Gitlab__UsageDataCounters__MergeRequestActivityUniqueCounter_create.yml
index 85e969daddd..621ce68132c 100644
--- a/config/events/1671198983_Gitlab__UsageDataCounters__MergeRequestActivityUniqueCounter_create.yml
+++ b/config/events/1671198983_Gitlab__UsageDataCounters__MergeRequestActivityUniqueCounter_create.yml
@@ -1,8 +1,8 @@
---
-description: Count of unique users creating a merge request per month
-category: Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter
-action: create
-label_description: "Mirrored RedisHLL i_code_review_user_create_mr_monthly events sent to Snowplow"
+description: A merge request was created
+category: InternalEventTracking
+action: i_code_review_user_create_mr
+label_description:
property_description:
value_description:
extra_properties:
@@ -18,7 +18,7 @@ introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/106869
distributions:
- ce
- ee
-tiers:
+tiers:
- free
- premium
- ultimate
diff --git a/config/feature_flags/development/stale_pending_direct_uploads_cleaner.yml b/config/feature_flags/development/stale_pending_direct_uploads_cleaner.yml
new file mode 100644
index 00000000000..f9bf9833f20
--- /dev/null
+++ b/config/feature_flags/development/stale_pending_direct_uploads_cleaner.yml
@@ -0,0 +1,8 @@
+---
+name: stale_pending_direct_uploads_cleaner
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/117746
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/385447
+milestone: '16.1'
+type: development
+group: group::pipeline security
+default_enabled: false
diff --git a/config/initializers/1_settings.rb b/config/initializers/1_settings.rb
index 76935b5903c..83403a0a631 100644
--- a/config/initializers/1_settings.rb
+++ b/config/initializers/1_settings.rb
@@ -685,6 +685,9 @@ Settings.cron_jobs['cleanup_dangling_debian_package_files_worker']['job_class']
Settings.cron_jobs['global_metrics_update_worker'] ||= {}
Settings.cron_jobs['global_metrics_update_worker']['cron'] ||= '*/1 * * * *'
Settings.cron_jobs['global_metrics_update_worker']['job_class'] ||= 'Metrics::GlobalMetricsUpdateWorker'
+Settings.cron_jobs['object_storage_delete_stale_direct_uploads_worker'] ||= {}
+Settings.cron_jobs['object_storage_delete_stale_direct_uploads_worker']['cron'] ||= '*/6 * * * *'
+Settings.cron_jobs['object_storage_delete_stale_direct_uploads_worker']['job_class'] = 'ObjectStorage::DeleteStaleDirectUploadsWorker'
Gitlab.ee do
Settings.cron_jobs['analytics_devops_adoption_create_all_snapshots_worker'] ||= {}
diff --git a/db/migrate/20230523125430_add_dismissal_reason_to_vulnerability_read.rb b/db/migrate/20230523125430_add_dismissal_reason_to_vulnerability_read.rb
new file mode 100644
index 00000000000..b9e437f0169
--- /dev/null
+++ b/db/migrate/20230523125430_add_dismissal_reason_to_vulnerability_read.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class AddDismissalReasonToVulnerabilityRead < Gitlab::Database::Migration[2.1]
+ enable_lock_retries!
+
+ def up
+ add_column :vulnerability_reads, :dismissal_reason, :smallint
+ end
+
+ def down
+ remove_column :vulnerability_reads, :dismissal_reason
+ end
+end
diff --git a/db/post_migrate/20230523131914_recreate_index_on_vulnerability_reads.rb b/db/post_migrate/20230523131914_recreate_index_on_vulnerability_reads.rb
new file mode 100644
index 00000000000..13f30b5ef6e
--- /dev/null
+++ b/db/post_migrate/20230523131914_recreate_index_on_vulnerability_reads.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+class RecreateIndexOnVulnerabilityReads < Gitlab::Database::Migration[2.1]
+ OLD_INDEX_NAME = "index_vulnerability_reads_common_finder_query"
+ NEW_INDEX_NAME = "index_vulnerability_reads_common_finder_query_2"
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index(
+ :vulnerability_reads,
+ %i[project_id state report_type severity vulnerability_id dismissal_reason],
+ name: NEW_INDEX_NAME,
+ order: { vulnerability_id: :desc }
+ )
+ remove_concurrent_index_by_name(
+ :vulnerability_reads,
+ OLD_INDEX_NAME
+ )
+ end
+
+ def down
+ add_concurrent_index(
+ :vulnerability_reads,
+ %i[project_id state report_type severity vulnerability_id],
+ name: OLD_INDEX_NAME,
+ order: { vulnerability_id: :desc }
+ )
+ remove_concurrent_index_by_name(
+ :vulnerability_reads,
+ NEW_INDEX_NAME
+ )
+ end
+end
diff --git a/db/post_migrate/20230523132647_recreate_index_on_vulnerability_reads2.rb b/db/post_migrate/20230523132647_recreate_index_on_vulnerability_reads2.rb
new file mode 100644
index 00000000000..0624ad22cf2
--- /dev/null
+++ b/db/post_migrate/20230523132647_recreate_index_on_vulnerability_reads2.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+class RecreateIndexOnVulnerabilityReads2 < Gitlab::Database::Migration[2.1]
+ OLD_INDEX_NAME = "index_vulnerability_reads_common_finder_query_with_namespace_id"
+ NEW_INDEX_NAME = "index_vulnerability_reads_common_finder_query_w_namespace_id"
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index(
+ :vulnerability_reads,
+ %i[namespace_id state report_type severity vulnerability_id dismissal_reason],
+ name: NEW_INDEX_NAME,
+ order: { vulnerability_id: :desc }
+ )
+ remove_concurrent_index_by_name(
+ :vulnerability_reads,
+ OLD_INDEX_NAME
+ )
+ end
+
+ def down
+ add_concurrent_index(
+ :vulnerability_reads,
+ %i[namespace_id state report_type severity vulnerability_id],
+ name: OLD_INDEX_NAME,
+ order: { vulnerability_id: :desc }
+ )
+ remove_concurrent_index_by_name(
+ :vulnerability_reads,
+ NEW_INDEX_NAME
+ )
+ end
+end
diff --git a/db/schema_migrations/20230523125430 b/db/schema_migrations/20230523125430
new file mode 100644
index 00000000000..9d0b8e8d289
--- /dev/null
+++ b/db/schema_migrations/20230523125430
@@ -0,0 +1 @@
+75c06f09e255289919d30c3cb9514c9c76f8510c689a23f4341bcd7948b8dae4 \ No newline at end of file
diff --git a/db/schema_migrations/20230523131914 b/db/schema_migrations/20230523131914
new file mode 100644
index 00000000000..1b7c9249f59
--- /dev/null
+++ b/db/schema_migrations/20230523131914
@@ -0,0 +1 @@
+3e5b2d18d3e0b7c573860c256cc3e6badfd79606f6822b9a108777f2d311f108 \ No newline at end of file
diff --git a/db/schema_migrations/20230523132647 b/db/schema_migrations/20230523132647
new file mode 100644
index 00000000000..056ca2315af
--- /dev/null
+++ b/db/schema_migrations/20230523132647
@@ -0,0 +1 @@
+852a190346dc674f5ddb0890c0daa5a3066beba73c638ffa48b8cdde7f1809bc \ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index e501a7bc0ad..7a04e1af4cf 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -24236,6 +24236,7 @@ CREATE TABLE vulnerability_reads (
cluster_agent_id text,
casted_cluster_agent_id bigint,
namespace_id bigint,
+ dismissal_reason smallint,
CONSTRAINT check_380451bdbe CHECK ((char_length(location_image) <= 2048)),
CONSTRAINT check_a105eb825a CHECK ((char_length(cluster_agent_id) <= 10))
);
@@ -32946,9 +32947,9 @@ CREATE UNIQUE INDEX index_vulnerability_occurrences_on_uuid ON vulnerability_occ
CREATE INDEX index_vulnerability_occurrences_on_vulnerability_id ON vulnerability_occurrences USING btree (vulnerability_id);
-CREATE INDEX index_vulnerability_reads_common_finder_query ON vulnerability_reads USING btree (project_id, state, report_type, severity, vulnerability_id DESC);
+CREATE INDEX index_vulnerability_reads_common_finder_query_2 ON vulnerability_reads USING btree (project_id, state, report_type, severity, vulnerability_id DESC, dismissal_reason);
-CREATE INDEX index_vulnerability_reads_common_finder_query_with_namespace_id ON vulnerability_reads USING btree (namespace_id, state, report_type, severity, vulnerability_id DESC);
+CREATE INDEX index_vulnerability_reads_common_finder_query_w_namespace_id ON vulnerability_reads USING btree (namespace_id, state, report_type, severity, vulnerability_id DESC, dismissal_reason);
CREATE INDEX index_vulnerability_reads_on_cluster_agent_id ON vulnerability_reads USING btree (cluster_agent_id) WHERE (report_type = 7);
diff --git a/doc/administration/auth/ldap/index.md b/doc/administration/auth/ldap/index.md
index 7687f7c9340..cdd6926268a 100644
--- a/doc/administration/auth/ldap/index.md
+++ b/doc/administration/auth/ldap/index.md
@@ -1047,7 +1047,7 @@ For more information on synchronizing users and groups between LDAP and GitLab,
1. In the configuration file, change:
- `omniauth_auto_link_user` to `saml` only.
- `omniauth_auto_link_ldap_user` to false.
- - `ldap_enabled` to `false`.
+ - `ldap_enabled` to `false`.
You can also comment out the LDAP provider settings.
## Troubleshooting
diff --git a/doc/administration/docs_self_host.md b/doc/administration/docs_self_host.md
index d1ad36880dd..e4cb1898aae 100644
--- a/doc/administration/docs_self_host.md
+++ b/doc/administration/docs_self_host.md
@@ -84,7 +84,7 @@ You can use GitLab Pages to host the GitLab product documentation.
Prerequisite:
-- Ensure the Pages site URL does not use a subfolder. Because of the way the
+- Ensure the Pages site URL does not use a subfolder. Because of the way the
site is pre-compiled, the CSS and JavaScript files are relative to the
main domain or subdomain. For example, URLs like `https://example.com/docs/`
are not supported.
diff --git a/doc/administration/geo/setup/index.md b/doc/administration/geo/setup/index.md
index eef915bb5d8..3525199226d 100644
--- a/doc/administration/geo/setup/index.md
+++ b/doc/administration/geo/setup/index.md
@@ -35,7 +35,7 @@ If both Geo sites are based on the [1K reference architecture](../../reference_a
1. Optional: [Configure a secondary LDAP server](../../auth/ldap/index.md) for the **secondary** sites. See [notes on LDAP](../index.md#ldap).
1. Optional: [Configure Geo secondary proxying](../secondary_proxy/index.md) to use a single, unified URL for all Geo sites. This step is recommended to accelerate most read requests while transparently proxying writes to the primary Geo site.
1. Follow the [Using a Geo Site](../replication/usage.md) guide.
-
+
### Multi-node Geo sites
If one or more of your sites is using the [2K reference architecture](../../reference_architectures/2k_users.md) or larger, see
diff --git a/doc/administration/monitoring/prometheus/gitlab_metrics.md b/doc/administration/monitoring/prometheus/gitlab_metrics.md
index f8cab0c605e..772b10ed863 100644
--- a/doc/administration/monitoring/prometheus/gitlab_metrics.md
+++ b/doc/administration/monitoring/prometheus/gitlab_metrics.md
@@ -204,6 +204,7 @@ configuration option in `gitlab.yml`. These metrics are served from the
| `sidekiq_jobs_dead_total` | Counter | 13.7 | Sidekiq dead jobs (jobs that have run out of retries) | `queue`, `boundary`, `external_dependencies`, `feature_category`, `urgency` |
| `sidekiq_redis_requests_total` | Counter | 13.1 | Redis requests during a Sidekiq job execution | `queue`, `boundary`, `external_dependencies`, `feature_category`, `job_status`, `urgency` |
| `sidekiq_elasticsearch_requests_total` | Counter | 13.1 | Elasticsearch requests during a Sidekiq job execution | `queue`, `boundary`, `external_dependencies`, `feature_category`, `job_status`, `urgency` |
+| `sidekiq_jobs_deferred_total` | Counter | 16.1 | Number of jobs being deferred when `defer_sidekiq_jobs` feature flag is enabled | `worker` |
| `sidekiq_running_jobs` | Gauge | 12.2 | Number of Sidekiq jobs running | `queue`, `boundary`, `external_dependencies`, `feature_category`, `urgency` |
| `sidekiq_concurrency` | Gauge | 12.5 | Maximum number of Sidekiq jobs | |
| `sidekiq_mem_total_bytes` | Gauge | 15.3 | Number of bytes allocated for both objects consuming an object slot and objects that required a malloc'| |
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 4bed7bcd634..1578b5bd328 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -12403,6 +12403,7 @@ Represents the total number of issues and their weights for a particular day.
| <a id="cicatalogresourceicon"></a>`icon` **{warning-solid}** | [`String`](#string) | **Introduced** in 15.11. This feature is an Experiment. It can be changed or removed at any time. Icon for the catalog resource. |
| <a id="cicatalogresourceid"></a>`id` **{warning-solid}** | [`ID!`](#id) | **Introduced** in 15.11. This feature is an Experiment. It can be changed or removed at any time. ID of the catalog resource. |
| <a id="cicatalogresourcename"></a>`name` **{warning-solid}** | [`String`](#string) | **Introduced** in 15.11. This feature is an Experiment. It can be changed or removed at any time. Name of the catalog resource. |
+| <a id="cicatalogresourcewebpath"></a>`webPath` **{warning-solid}** | [`String`](#string) | **Introduced** in 16.1. This feature is an Experiment. It can be changed or removed at any time. Web path of the catalog resource. |
#### Fields with arguments
@@ -18648,6 +18649,7 @@ Represents the network policy.
| Name | Type | Description |
| ---- | ---- | ----------- |
| <a id="noteauthor"></a>`author` | [`UserCore!`](#usercore) | User who wrote this note. |
+| <a id="noteawardemoji"></a>`awardEmoji` | [`AwardEmojiConnection`](#awardemojiconnection) | List of award emojis associated with the note. (see [Connections](#connections)) |
| <a id="notebody"></a>`body` | [`String!`](#string) | Content of the note. |
| <a id="notebodyhtml"></a>`bodyHtml` | [`String`](#string) | GitLab Flavored Markdown rendering of `note`. |
| <a id="noteconfidential"></a>`confidential` **{warning-solid}** | [`Boolean`](#boolean) | **Deprecated** in 15.5. This was renamed. Use: `internal`. |
diff --git a/doc/api/namespaces.md b/doc/api/namespaces.md
index 15ce73fdbc3..ddf56209be2 100644
--- a/doc/api/namespaces.md
+++ b/doc/api/namespaces.md
@@ -54,7 +54,8 @@ Example response:
"billable_members_count": 1,
"plan": "default",
"trial_ends_on": null,
- "trial": false
+ "trial": false,
+ "root_repository_size": 100
},
{
"id": 2,
@@ -69,7 +70,8 @@ Example response:
"billable_members_count": 2,
"plan": "default",
"trial_ends_on": null,
- "trial": false
+ "trial": false,
+ "root_repository_size": 100
},
{
"id": 3,
@@ -84,7 +86,8 @@ Example response:
"billable_members_count": 5,
"plan": "default",
"trial_ends_on": null,
- "trial": false
+ "trial": false,
+ "root_repository_size": 100
}
]
```
@@ -124,7 +127,7 @@ once a day.
```
NOTE:
-Only group owners are presented with `members_count_with_descendants` and `plan`.
+Only group owners are presented with `members_count_with_descendants`, `root_repository_size` and `plan`.
## Get namespace by ID
@@ -162,7 +165,8 @@ Example response:
"seats_in_use": 0,
"plan": "default",
"trial_ends_on": null,
- "trial": false
+ "trial": false,
+ "root_repository_size": 100
}
```
@@ -190,7 +194,8 @@ Example response:
"seats_in_use": 0,
"plan": "default",
"trial_ends_on": null,
- "trial": false
+ "trial": false,
+ "root_repository_size": 100
}
```
diff --git a/doc/architecture/blueprints/cells/index.md b/doc/architecture/blueprints/cells/index.md
index 9938875adb6..94ff12f2c55 100644
--- a/doc/architecture/blueprints/cells/index.md
+++ b/doc/architecture/blueprints/cells/index.md
@@ -119,7 +119,7 @@ would be required to define a general split of data and build required tooling.
1. **User can push to Git repository.**
- The purpose is to ensure that essential joins from the projects table are properly attributed to be
+ The purpose is to ensure that essential joins from the projects table are properly attributed to be
Cell-local, and as a result the essential Git workflow is supported.
1. **User can run CI pipeline.**
@@ -159,7 +159,7 @@ This list is not exhaustive of work needed to be done.
### 4. Routing layer
The routing layer is meant to offer a consistent user experience where all Cells are presented
-under a single domain (for example, `gitlab.com`), instead of
+under a single domain (for example, `gitlab.com`), instead of
having to navigate to separate domains.
The user will able to use `https://gitlab.com` to access Cell-enabled GitLab. Depending
diff --git a/doc/architecture/blueprints/organization/index.md b/doc/architecture/blueprints/organization/index.md
index 8eb367ddfc8..bebd1be593a 100644
--- a/doc/architecture/blueprints/organization/index.md
+++ b/doc/architecture/blueprints/organization/index.md
@@ -87,14 +87,14 @@ Self-managed instances would set a default Organization.
The Organization MVC will contain the following functionality:
-- Instance setting to allow the creation of multiple Organizations. This will be enabled by default on GitLab.com, and disabled for self-managed GitLab.
+- Instance setting to allow the creation of multiple Organizations. This will be enabled by default on GitLab.com, and disabled for self-managed GitLab.
- Every instance will have a default organization. Initially, all users will be managed by this default Organization.
- Organization Owner. The creation of an Organization appoints that user as the Organization Owner. Once established, the Organization Owner can appoint other Organization Owners.
- Organization users. A user is managed by one Organization, but can be part of multiple Organizations. Users are able to navigate between the different Organizations they are part of.
- Setup settings. Containing the Organization name, ID, description, README, and avatar. Settings are editable by the Organization Owner.
- Setup flow. Users are able to build an Organization on top of an existing top-level group. New users are able to create an Organization from scratch and to start building top-level groups from there.
- Visibility. Options will be `public` and `private`. A nonuser of a specific Organization will not see private Organizations in the explore section. Visibility is editable by the Organization Owner.
-- Organization settings page with the added ability to remove an Organization. Deletion of the default Organization is prevented.
+- Organization settings page with the added ability to remove an Organization. Deletion of the default Organization is prevented.
- Groups. This includes the ability to create, edit, and delete groups, as well as a Groups overview that can be accessed by the Organization Owner.
- Projects. This includes the ability to create, edit, and delete projects, as well as a Projects overview that can be accessed by the Organization Owner.
@@ -146,9 +146,9 @@ In iteration 2, an Organization MVC Experiment will be released. We will test th
### Iteration 3: Organization MVC Beta (FY24Q4)
-In iteration 3, the Organization MVC Beta will be released.
+In iteration 3, the Organization MVC Beta will be released.
-- Multiple Organization Owners can be assigned.
+- Multiple Organization Owners can be assigned.
- Enterprise users can be added to an Organization.
### Iteration 4: Organization MVC GA (FY25Q1)
@@ -160,7 +160,7 @@ After the initial rollout of Organizations, the following functionality will be
1. Internal visibility will be made available on Organizations that are part of GitLab.com.
1. Move billing from top-level group to Organization.
1. Audit events at the Organization level.
-1. Set merge request approval rules at the Organization level and cascade to all groups and projects.
+1. Set merge request approval rules at the Organization level and cascade to all groups and projects.
1. Security policies at the Organization level.
1. Vulnerability reports at the Organization level.
1. Cascading Organization setting to enforce security scans.
diff --git a/doc/ci/runners/runners_scope.md b/doc/ci/runners/runners_scope.md
index 43204b463b3..e36d57108ab 100644
--- a/doc/ci/runners/runners_scope.md
+++ b/doc/ci/runners/runners_scope.md
@@ -95,7 +95,7 @@ To disable shared runners for a group:
select **Allow projects and subgroups to override the group setting**.
NOTE:
-If you re-enable the shared runners for a group after you disable them, a user with the
+If you re-enable the shared runners for a group after you disable them, a user with the
Owner or Maintainer role must manually change this setting for each project subgroup or project.
### How shared runners pick jobs
diff --git a/doc/ci/runners/saas/linux_saas_runner.md b/doc/ci/runners/saas/linux_saas_runner.md
index f1ae1a368bc..055cf651067 100644
--- a/doc/ci/runners/saas/linux_saas_runner.md
+++ b/doc/ci/runners/saas/linux_saas_runner.md
@@ -89,7 +89,8 @@ Below are the settings for SaaS runners on Linux.
[distributed cache](https://docs.gitlab.com/runner/configuration/autoscale.html#distributed-runners-caching)
that's stored in a Google Cloud Storage (GCS) bucket. Cache contents not updated in
the last 14 days are automatically removed, based on the
- [object lifecycle management policy](https://cloud.google.com/storage/docs/lifecycle).
+ [object lifecycle management policy](https://cloud.google.com/storage/docs/lifecycle). The maximum size of an
+ uploaded cache artifact can be 5GB after the cache becomes a compressed archive.
- **Timeout settings**: Jobs handled by the SaaS Runners on Linux
**time out after 3 hours**, regardless of the timeout configured in a
diff --git a/doc/development/ai_architecture.md b/doc/development/ai_architecture.md
index e9994c8a6f4..ac62f50baf5 100644
--- a/doc/development/ai_architecture.md
+++ b/doc/development/ai_architecture.md
@@ -84,7 +84,7 @@ Web --> AIF
GitLab currently operates a cloud-hosted AI architecture. We are exploring how self-managed instances integrate with it.
-There are two primary reasons for this: the best AI models are cloud-based as they often depend on specialized hardware designed for this purpose, and operating self-managed infrastructure capable of AI at-scale and with appropriate performance is a significant undertaking. We are actively [tracking self-managed customers interested in AI](https://gitlab.com/gitlab-org/gitlab/-/issues/409183).
+There are two primary reasons for this: the best AI models are cloud-based as they often depend on specialized hardware designed for this purpose, and operating self-managed infrastructure capable of AI at-scale and with appropriate performance is a significant undertaking. We are actively [tracking self-managed customers interested in AI](https://gitlab.com/gitlab-org/gitlab/-/issues/409183).
## Supported technologies
diff --git a/doc/gitlab-basics/start-using-git.md b/doc/gitlab-basics/start-using-git.md
index fd322b67abe..c824fc2e44f 100644
--- a/doc/gitlab-basics/start-using-git.md
+++ b/doc/gitlab-basics/start-using-git.md
@@ -276,7 +276,7 @@ git checkout -b <name-of-branch>
```
GitLab enforces [branch naming rules](../user/project/repository/branches/index.md#name-your-branch)
-to prevent problems, and provides
+to prevent problems, and provides
[branch naming patterns](../user/project/repository/branches/index.md#prefix-branch-names-with-issue-numbers)
to streamline merge request creation.
diff --git a/doc/raketasks/backup_gitlab.md b/doc/raketasks/backup_gitlab.md
index 890cbaf8b3b..6a17a808983 100644
--- a/doc/raketasks/backup_gitlab.md
+++ b/doc/raketasks/backup_gitlab.md
@@ -21,6 +21,7 @@ including:
- Packages ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/332006) in GitLab 14.7)
- Snippets
- [Group wikis](../user/project/wiki/group.md)
+- Project-level Secure Files ([introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121142) in GitLab 16.1)
Backups do not include:
@@ -240,6 +241,7 @@ You can exclude specific directories from the backup by adding the environment v
- `pages` (Pages content)
- `repositories` (Git repositories data)
- `packages` (Packages)
+- `ci_secure_files` (Project-level Secure Files)
NOTE:
When [backing up and restoring Helm Charts](https://docs.gitlab.com/charts/architecture/backup-restore.html), there is an additional option `packages`, which refers to any packages managed by the GitLab [package registry](../user/packages/package_registry/index.md).
diff --git a/doc/topics/gitlab_flow.md b/doc/topics/gitlab_flow.md
index eb298841247..f40d59ad7e4 100644
--- a/doc/topics/gitlab_flow.md
+++ b/doc/topics/gitlab_flow.md
@@ -311,7 +311,7 @@ In GitLab Flow, you can configure your pipeline to run every time you commit cha
When you are ready to merge your feature branch, assign the merge request to a maintainer for the project.
Also, mention any other people from whom you would like feedback.
After the assigned person feels comfortable with the result, they can merge the branch.
-In GitLab Flow, a [merged results pipeline](../ci/pipelines/merged_results_pipelines.md) runs against the results of the source and target branches merged together.
+In GitLab Flow, a [merged results pipeline](../ci/pipelines/merged_results_pipelines.md) runs against the results of the source and target branches merged together.
If the assigned person does not feel comfortable, they can request more changes or close the merge request without merging.
NOTE:
@@ -513,7 +513,7 @@ The words "change," "improve," "fix," and "refactor" don't add much information
For more information, see Tim Pope's excellent [note about formatting commit messages](https://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html).
To add more context to a commit message, consider adding information regarding the
-origin of the change, such the GitLab issue URL or Jira issue number. That way, you provide
+origin of the change, such as the GitLab issue URL or Jira issue number. That way, you provide
more information for users who need in-depth context about the change.
For example:
diff --git a/doc/tutorials/configure_gitlab_runner_to_use_gke/index.md b/doc/tutorials/configure_gitlab_runner_to_use_gke/index.md
new file mode 100644
index 00000000000..bf8f25081c4
--- /dev/null
+++ b/doc/tutorials/configure_gitlab_runner_to_use_gke/index.md
@@ -0,0 +1,201 @@
+---
+stage: Verify
+group: Runner
+info: To determine the technical writer assigned to the Stage/Group associated with this page, see https://about.gitlab.com/handbook/product/ux/technical-writing/#assignments
+---
+
+# Tutorial: Configure GitLab Runner to use the Google Kubernetes Engine
+
+This tutorial describes how to configure GitLab Runner to use the Google Kubernetes Engine (GKE)
+to run jobs.
+
+In this tutorial, you configure GitLab Runner to run jobs in the following [GKE cluster modes](https://cloud.google.com/kubernetes-engine/docs/concepts/types-of-clusters):
+
+- Autopilot
+- Standard
+
+To configure GitLab Runner to use the GKE:
+
+1. [Set up your environment](#set-up-your-environment).
+1. [Create and connect to a cluster](#create-and-connect-to-a-cluster).
+1. [Install and configure the Kubernetes Operator](#install-and-configure-the-kubernetes-operator).
+1. Optional. [Verify that the configuration was successful](#verify-your-configuration).
+
+## Prerequisites
+
+Before you can configure GitLab Runner to use the GKE you must:
+
+- Have a project where you have the Maintainer or Owner role. If you don't have a project, you can [create it](../../user/project/index.md).
+- [Obtain the project runner registration token](../../ci/runners/register_runner.md#generate-a-registration-token-deprecated).
+- Install GitLab Runner.
+
+## Set up your environment
+
+Install the tools to configure and use GitLab Runner in the GKE.
+
+1. [Install and configure Google Cloud CLI](https://cloud.google.com/sdk/docs/install). You use Google Cloud CLI to connect to the cluster.
+1. [Install and configure kubectl](https://kubernetes.io/docs/tasks/tools/). You use kubectl to communicate with the remote cluster from your local environment.
+
+## Create and connect to a cluster
+
+This step describes how to create a cluster and connect to it. After you connect to the cluster, you use kubectl to interact with it
+and, for autopilot clusters, to add configurations that specify which jobs to run.
+
+1. In the Google Cloud Platform, create an [autopilot](https://cloud.google.com/kubernetes-engine/docs/how-to/creating-an-autopilot-cluster) or [standard](https://cloud.google.com/kubernetes-engine/docs/how-to/creating-a-zonal-cluster) cluster.
+
+1. Install the kubectl authentication plugin:
+
+ ```shell
+ gcloud components install gke-gcloud-auth-plugin
+ ```
+
+1. Connect to the cluster:
+
+ ```shell
+ gcloud container clusters get-credentials CLUSTER_NAME --zone=CLUSTER_LOCATION
+ ```
+
+1. View the cluster configuration:
+
+ ```shell
+ kubectl config view
+ ```
+
+1. Verify that you are connected to the cluster:
+
+ ```shell
+   kubectl config current-context
+ ```
+
+## Install and configure the Kubernetes Operator
+
+Now that you have a cluster, you're ready to install and configure the Kubernetes Operator.
+
+1. Install the prerequisites:
+
+ ```shell
+ kubectl apply -f https://github.com/jetstack/cert-manager/releases/download/v1.7.1/cert-manager.yaml
+ ```
+
+1. Install the Operator Lifecycle Manager (OLM), a tool that manages the Kubernetes Operators that
+ run on the cluster:
+
+ <!-- markdownlint-disable -->
+
+ ```shell
+ curl --silent --location "https://github.com/operator-framework/operator-lifecycle-manager/releases/download/v0.24.0/install.sh" \
+ | bash -s v0.24.0
+ ```
+
+ <!-- markdownlint-enable -->
+
+1. Install the Kubernetes Operator Catalog:
+
+ ```shell
+ kubectl create -f https://raw.githubusercontent.com/operator-framework/operator-lifecycle-manager/master/deploy/upstream/quickstart/crds.yaml
+ kubectl create -f https://raw.githubusercontent.com/operator-framework/operator-lifecycle-manager/master/deploy/upstream/quickstart/olm.yaml
+ ```
+
+1. Install the Kubernetes Operator:
+
+ ```shell
+ kubectl create -f https://operatorhub.io/install/gitlab-runner-operator.yaml
+ ```
+
+1. Create a secret that contains the `runner-registration-token` from your
+ GitLab project:
+
+ ```shell
+ cat > gitlab-runner-secret.yml << EOF
+ apiVersion: v1
+ kind: Secret
+ metadata:
+ name: gitlab-runner-secret
+ type: Opaque
+ stringData:
+ runner-registration-token: YOUR_RUNNER_REGISTRATION_TOKEN
+ EOF
+ ```
+
+1. Apply the secret:
+
+ ```shell
+ kubectl apply -f gitlab-runner-secret.yml
+ ```
+
+1. For autopilot clusters, you must create a YAML file with additional
+ configuration details. Autopilot clusters use this file to instruct the
+ GKE about what resources the Pod needs so it can run the jobs. You don't
+ need to create this file for standard clusters. Here is an example configuration:
+
+ ```shell
+ cat > config.yml << EOF
+ apiVersion: v1
+   kind: ConfigMap
+   metadata:
+     name: config.toml
+   data:
+     config.toml: |
+       [[runners]]
+         [runners.kubernetes]
+         image = "alpine"
+         cpu_limit = "1"
+         memory_limit = "128Mi"
+         service_cpu_limit = "1"
+         service_memory_limit = "128Mi"
+         helper_cpu_limit = "500m"
+         helper_memory_limit = "100Mi"
+   EOF
+   ```
+
+1. Apply the `config.yml`:
+
+ ```shell
+ kubectl apply -f config.yml
+ ```
+
+1. Create the custom resource definition file and include the following information:
+
+ ```shell
+ cat > gitlab-runner.yml << EOF
+ apiVersion: apps.gitlab.com/v1beta2
+ kind: Runner
+ metadata:
+ name: gitlab-runner
+ spec:
+ gitlabUrl: https://gitlab.example.com
+ buildImage: alpine
+ config: "config.toml" # <---- Reference to the config.toml configMap
+ token: gitlab-runner-secret
+ EOF
+ ```
+
+1. Apply the custom resource definition file:
+
+ ```shell
+ kubectl apply -f gitlab-runner.yml
+ ```
+
+That's it! You've configured GitLab Runner to use the GKE.
+In the next step, you can check if your configuration is working.
+
+## Verify your configuration
+
+To check if runners are running in the GKE cluster, you can either:
+
+- Use the following command:
+
+ ```shell
+ kubectl get pods
+ ```
+
+ You should see the following output, which shows your runners
+ are running in the GKE cluster:
+
+ ```plaintext
+ NAME READY STATUS RESTARTS AGE
+ gitlab-runner-bf9894bdb-wplxn 1/1 Running 0 5m
+ ```
+
+- Check the job log in GitLab:
+ 1. On the top bar, select **Main menu > Projects** and find the project.
+ 1. On the left sidebar, select **CI/CD > Jobs** and find the job.
+ 1. To view the job log, select the job status.
diff --git a/doc/update/zero_downtime.md b/doc/update/zero_downtime.md
index 0eb7a520850..c815087b0b3 100644
--- a/doc/update/zero_downtime.md
+++ b/doc/update/zero_downtime.md
@@ -94,7 +94,7 @@ meet the other online upgrade requirements mentioned above.
WARNING:
You can only upgrade one minor release at a time. So from 15.6 to 15.7, not to 15.8.
-If you attempt more than one minor release, the upgrade may fail.
+If you attempt more than one minor release, the upgrade may fail.
### Use a load balancer in front of web (Puma) nodes
diff --git a/doc/user/admin_area/settings/usage_statistics.md b/doc/user/admin_area/settings/usage_statistics.md
index ef34bfbc151..f901ce46bf0 100644
--- a/doc/user/admin_area/settings/usage_statistics.md
+++ b/doc/user/admin_area/settings/usage_statistics.md
@@ -212,6 +212,7 @@ If there are problems with the manual upload:
1. Open a confidential issue in the [security fork of version app project](https://gitlab.com/gitlab-org/security/version.gitlab.com).
1. Attach the JSON payload if possible.
1. Tag `@gitlab-org/analytics-section/analytics-instrumentation` who will triage the issue.
+
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
diff --git a/doc/user/group/compliance_frameworks.md b/doc/user/group/compliance_frameworks.md
index 77fca862a5b..5e76db35d5a 100644
--- a/doc/user/group/compliance_frameworks.md
+++ b/doc/user/group/compliance_frameworks.md
@@ -400,7 +400,7 @@ This configuration doesn't overwrite the compliance pipeline and you get the fol
### Prefilled variables are not shown
-Because of a [known issue](https://gitlab.com/gitlab-org/gitlab/-/issues/382857),
+Because of a [known issue](https://gitlab.com/gitlab-org/gitlab/-/issues/382857),
compliance pipelines in GitLab 15.3 and later can prevent
[prefilled variables](../../ci/pipelines/index.md#prefill-variables-in-manual-pipelines)
from appearing when manually starting a pipeline.
diff --git a/doc/user/group/manage.md b/doc/user/group/manage.md
index 3549818e611..508b5b27600 100644
--- a/doc/user/group/manage.md
+++ b/doc/user/group/manage.md
@@ -427,7 +427,7 @@ This setting enables users in the group to access [Code Suggestions](../project/
This setting [cascades to all projects](../project/merge_requests/approvals/settings.md#settings-cascading)
that belong to the group.
-However, each user can enable or disable Code Suggestions for themselves.
+However, each user can enable or disable Code Suggestions for themselves.
To enable Code Suggestions for a group:
diff --git a/doc/user/project/repository/code_suggestions.md b/doc/user/project/repository/code_suggestions.md
index f22bb615036..e567f157fa3 100644
--- a/doc/user/project/repository/code_suggestions.md
+++ b/doc/user/project/repository/code_suggestions.md
@@ -60,7 +60,7 @@ Usage of Code Suggestions is governed by the [GitLab Testing Agreement](https://
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/121079) in GitLab 16.1 as [Beta](/ee/policy/alpha-beta-support.md#beta).
-Each user can enable Code Suggestions for themselves:
+Each user can enable Code Suggestions for themselves:
1. On the top bar, in the upper-right corner, select your avatar.
1. On the left sidebar, select **Preferences**.
diff --git a/lib/api/entities/namespace.rb b/lib/api/entities/namespace.rb
index 15bc7d158c4..5e0630e0f7f 100644
--- a/lib/api/entities/namespace.rb
+++ b/lib/api/entities/namespace.rb
@@ -10,6 +10,14 @@ module API
def expose_members_count_with_descendants?(namespace, opts)
namespace.kind == 'group' && Ability.allowed?(opts[:current_user], :admin_group, namespace)
end
+
+ expose :root_repository_size, documentation: { type: 'integer', example: 123 }, if: -> (namespace, opts) { expose_root_repository_size?(namespace, opts) } do |namespace, _|
+ namespace.root_storage_statistics&.repository_size
+ end
+
+ def expose_root_repository_size?(namespace, opts)
+ namespace.kind == 'group' && Ability.allowed?(opts[:current_user], :admin_group, namespace)
+ end
end
end
end
diff --git a/lib/atlassian/jira_issue_key_extractor.rb b/lib/atlassian/jira_issue_key_extractor.rb
index 881ba4544b2..17fa40e5676 100644
--- a/lib/atlassian/jira_issue_key_extractor.rb
+++ b/lib/atlassian/jira_issue_key_extractor.rb
@@ -12,7 +12,9 @@ module Atlassian
end
def issue_keys
- @text.scan(@match_regex).flatten.uniq
+ return @text.scan(@match_regex).flatten.uniq if @match_regex.is_a?(Regexp)
+
+ @match_regex.scan(@text).flatten.uniq
end
end
end
diff --git a/lib/backup/manager.rb b/lib/backup/manager.rb
index b5e1634004a..23342529933 100644
--- a/lib/backup/manager.rb
+++ b/lib/backup/manager.rb
@@ -176,6 +176,11 @@ module Backup
human_name: _('packages'),
destination_path: 'packages.tar.gz',
task: build_files_task(Settings.packages.storage_path, excludes: ['tmp'])
+ ),
+ 'ci_secure_files' => TaskDefinition.new(
+ human_name: _('ci secure files'),
+ destination_path: 'ci_secure_files.tar.gz',
+ task: build_files_task(Settings.ci_secure_files.storage_path, excludes: ['tmp'])
)
}.freeze
end
diff --git a/lib/gitlab/bitbucket_import/importer.rb b/lib/gitlab/bitbucket_import/importer.rb
index 592e75b1430..e785ce558db 100644
--- a/lib/gitlab/bitbucket_import/importer.rb
+++ b/lib/gitlab/bitbucket_import/importer.rb
@@ -10,6 +10,8 @@ module Gitlab
attr_reader :project, :client, :errors, :users
+ ALREADY_IMPORTED_CACHE_KEY = 'bitbucket_cloud-importer/already-imported/%{project}/%{collection}'
+
def initialize(project)
@project = project
@client = Bitbucket::Client.new(project.import_data.credentials)
@@ -31,6 +33,18 @@ module Gitlab
private
+ def already_imported?(collection, iid)
+ Gitlab::Cache::Import::Caching.set_includes?(cache_key(collection), iid)
+ end
+
+ def mark_as_imported(collection, iid)
+ Gitlab::Cache::Import::Caching.set_add(cache_key(collection), iid)
+ end
+
+ def cache_key(collection)
+ format(ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: collection)
+ end
+
def handle_errors
return unless errors.any?
@@ -97,6 +111,8 @@ module Gitlab
issue_type_id = ::WorkItems::Type.default_issue_type.id
client.issues(repo).each_with_index do |issue, index|
+ next if already_imported?(:issues, issue.iid)
+
# If a user creates an issue while the import is in progress, this can lead to an import failure.
# The workaround is to allocate IIDs before starting the importer.
allocate_issues_internal_id!(project, client) if index == 0
@@ -127,6 +143,8 @@ module Gitlab
updated_at: issue.updated_at
)
+ mark_as_imported(:issues, issue.iid)
+
metrics.issues_counter.increment
gitlab_issue.labels << @labels[label_name]
@@ -179,6 +197,8 @@ module Gitlab
pull_requests = client.pull_requests(repo)
pull_requests.each do |pull_request|
+ next if already_imported?(:pull_requests, pull_request.iid)
+
import_pull_request(pull_request)
end
end
@@ -209,6 +229,8 @@ module Gitlab
updated_at: pull_request.updated_at
)
+ mark_as_imported(:pull_requests, pull_request.iid)
+
metrics.merge_requests_counter.increment
import_pull_request_comments(pull_request, merge_request) if merge_request.persisted?
diff --git a/lib/gitlab/internal_events.rb b/lib/gitlab/internal_events.rb
new file mode 100644
index 00000000000..cde83068de1
--- /dev/null
+++ b/lib/gitlab/internal_events.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module InternalEvents
+ class << self
+ include Gitlab::Tracking::Helpers
+
+ def track_event(event_name, **kwargs)
+ user_id = kwargs.delete(:user_id)
+ UsageDataCounters::HLLRedisCounter.track_event(event_name, values: user_id)
+
+ project_id = kwargs.delete(:project_id)
+ namespace_id = kwargs.delete(:namespace_id)
+
+ namespace = Namespace.find(namespace_id) if namespace_id
+
+ standard_context = Tracking::StandardContext.new(
+ project_id: project_id,
+ user_id: user_id,
+ namespace_id: namespace&.id,
+ plan_name: namespace&.actual_plan_name
+ ).to_context
+
+ service_ping_context = Tracking::ServicePingContext.new(
+ data_source: :redis_hll,
+ event: event_name
+ ).to_context
+
+ track_struct_event(event_name, contexts: [standard_context, service_ping_context])
+ end
+
+ private
+
+ def track_struct_event(event_name, contexts:)
+ category = 'InternalEventTracking'
+ tracker = Gitlab::Tracking.tracker
+ tracker.event(category, event_name, context: contexts)
+ rescue StandardError => error
+ Gitlab::ErrorTracking
+ .track_and_raise_for_dev_exception(error, snowplow_category: category, snowplow_action: event_name)
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/sidekiq_logging/structured_logger.rb b/lib/gitlab/sidekiq_logging/structured_logger.rb
index 7ce3f6b5ccb..03c8981440b 100644
--- a/lib/gitlab/sidekiq_logging/structured_logger.rb
+++ b/lib/gitlab/sidekiq_logging/structured_logger.rb
@@ -76,15 +76,17 @@ module Gitlab
payload['load_balancing_strategy'] = job['load_balancing_strategy'] if job['load_balancing_strategy']
payload['dedup_wal_locations'] = job['dedup_wal_locations'] if job['dedup_wal_locations'].present?
- if job_exception
- payload['message'] = "#{message}: fail: #{payload['duration_s']} sec"
- payload['job_status'] = 'fail'
-
- Gitlab::ExceptionLogFormatter.format!(job_exception, payload)
- else
- payload['message'] = "#{message}: done: #{payload['duration_s']} sec"
- payload['job_status'] = 'done'
- end
+ job_status = if job_exception
+ 'fail'
+ elsif job['deferred']
+ 'deferred'
+ else
+ 'done'
+ end
+
+ payload['message'] = "#{message}: #{job_status}: #{payload['duration_s']} sec"
+ payload['job_status'] = job_status
+ Gitlab::ExceptionLogFormatter.format!(job_exception, payload) if job_exception
db_duration = ActiveRecord::LogSubscriber.runtime
payload['db_duration_s'] = Gitlab::Utils.ms_to_round_sec(db_duration)
diff --git a/lib/gitlab/sidekiq_middleware/defer_jobs.rb b/lib/gitlab/sidekiq_middleware/defer_jobs.rb
index 62a16d60df7..e91621f96ce 100644
--- a/lib/gitlab/sidekiq_middleware/defer_jobs.rb
+++ b/lib/gitlab/sidekiq_middleware/defer_jobs.rb
@@ -11,12 +11,9 @@ module Gitlab
# is turned off (or when Feature.enabled? returns false by chance while using `percentage of time` value)
def call(worker, job, _queue)
if defer_job?(worker)
- Sidekiq.logger.info(
- class: worker.class.name,
- job_id: job['jid'],
- message: "Deferring #{worker.class.name} for #{DELAY} s with arguments (#{job['args'].inspect})"
- )
+ job['deferred'] = true # for logging job_status
worker.class.perform_in(DELAY, *job['args'])
+ counter.increment({ worker: worker.class.name })
return
end
@@ -27,6 +24,12 @@ module Gitlab
Feature.enabled?(:"#{FEATURE_FLAG_PREFIX}_#{worker.class.name}", type: :worker,
default_enabled_if_undefined: false)
end
+
+ private
+
+ def counter
+ @counter ||= Gitlab::Metrics.counter(:sidekiq_jobs_deferred_total, 'The number of jobs deferred')
+ end
end
end
end
diff --git a/lib/gitlab/tracking.rb b/lib/gitlab/tracking.rb
index 52aee4d2d45..f127e14243c 100644
--- a/lib/gitlab/tracking.rb
+++ b/lib/gitlab/tracking.rb
@@ -68,6 +68,14 @@ module Gitlab
false
end
+ def tracker
+ @tracker ||= if snowplow_micro_enabled?
+ Gitlab::Tracking::Destinations::SnowplowMicro.new
+ else
+ Gitlab::Tracking::Destinations::Snowplow.new
+ end
+ end
+
private
def track_struct_event(destination, category, action, label:, property:, value:, contexts:) # rubocop:disable Metrics/ParameterLists
@@ -76,14 +84,6 @@ module Gitlab
rescue StandardError => error
Gitlab::ErrorTracking.track_and_raise_for_dev_exception(error, snowplow_category: category, snowplow_action: action)
end
-
- def tracker
- @tracker ||= if snowplow_micro_enabled?
- Gitlab::Tracking::Destinations::SnowplowMicro.new
- else
- Gitlab::Tracking::Destinations::Snowplow.new
- end
- end
end
end
end
diff --git a/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter.rb b/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter.rb
index fceeacb60ca..1ed2e891a1f 100644
--- a/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter.rb
+++ b/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter.rb
@@ -64,20 +64,15 @@ module Gitlab
end
def track_create_mr_action(user:, merge_request:)
- track_unique_action_by_user(MR_USER_CREATE_ACTION, user)
track_unique_action_by_merge_request(MR_CREATE_ACTION, merge_request)
project = merge_request.target_project
- Gitlab::Tracking.event(
- name,
- :create,
- project: project,
- namespace: project.namespace,
- user: user,
- property: MR_USER_CREATE_ACTION,
- label: 'redis_hll_counters.code_review.i_code_review_user_create_mr_monthly',
- context: [Gitlab::Tracking::ServicePingContext.new(data_source: :redis_hll,
- event: MR_USER_CREATE_ACTION).to_context]
+
+ Gitlab::InternalEvents.track_event(
+ MR_USER_CREATE_ACTION,
+ user_id: user.id,
+ project_id: project.id,
+ namespace_id: project.namespace_id
)
end
diff --git a/lib/object_storage/fog_helpers.rb b/lib/object_storage/fog_helpers.rb
new file mode 100644
index 00000000000..1db75ea24b9
--- /dev/null
+++ b/lib/object_storage/fog_helpers.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+module ObjectStorage
+ module FogHelpers
+ include ::Gitlab::Utils::StrongMemoize
+
+ def available?
+ object_store.enabled
+ end
+
+ private
+
+ def delete_object(key)
+ return unless available?
+
+ connection.delete_object(bucket_name, object_key(key))
+
+ # So far, only GoogleCloudStorage raises an exception when the file is not found.
+ # Other providers support idempotent requests and do not raise an error
+ # when the file is missing.
+ rescue ::Google::Apis::ClientError => e
+ Gitlab::ErrorTracking.log_exception(e)
+ end
+
+ def storage_location_identifier
+ raise NotImplementedError, "#{self} does not implement #{__method__}"
+ end
+
+ def object_store
+ ObjectStorage::Config::LOCATIONS.fetch(storage_location_identifier).object_store
+ end
+
+ def bucket_name
+ object_store.remote_directory
+ end
+
+ def object_key(key)
+ # We allow administrators to create "sub buckets" by setting a prefix.
+ # This makes it possible to deploy GitLab with only one object storage
+ # bucket. This mirrors the implementation in app/uploaders/object_storage.rb.
+ File.join([object_store.bucket_prefix, key].compact)
+ end
+
+ def connection
+ return unless available?
+
+ ::Fog::Storage.new(object_store.connection.to_hash.deep_symbolize_keys)
+ end
+ strong_memoize_attr :connection
+ end
+end
diff --git a/lib/object_storage/pending_direct_upload.rb b/lib/object_storage/pending_direct_upload.rb
index 3e84bc4ebc9..3a930e0e0af 100644
--- a/lib/object_storage/pending_direct_upload.rb
+++ b/lib/object_storage/pending_direct_upload.rb
@@ -2,31 +2,97 @@
module ObjectStorage
class PendingDirectUpload
+ include ObjectStorage::FogHelpers
+
KEY = 'pending_direct_uploads'
+ MAX_UPLOAD_DURATION = 3.hours.freeze
- def self.prepare(location_identifier, path)
- ::Gitlab::Redis::SharedState.with do |redis|
+ def self.prepare(location_identifier, object_storage_path)
+ with_redis do |redis|
# We need to store the location_identifier together with the timestamp to properly delete
# this object if ever this upload gets stale. The location identifier will be used
# by the clean up worker to properly generate the storage options through ObjectStorage::Config.for_location
- redis.hset(KEY, key(location_identifier, path), Time.current.utc.to_i)
+ key = redis_key(location_identifier, object_storage_path)
+ redis.hset(KEY, key, Time.current.utc.to_i)
+ log_event(:prepared, key)
+ end
+ end
+
+ def self.exists?(location_identifier, object_storage_path)
+ with_redis do |redis|
+ redis.hexists(KEY, redis_key(location_identifier, object_storage_path))
+ end
+ end
+
+ def self.complete(location_identifier, object_storage_path)
+ with_redis do |redis|
+ key = redis_key(location_identifier, object_storage_path)
+ redis.hdel(KEY, key)
+ log_event(:completed, key)
end
end
- def self.exists?(location_identifier, path)
- ::Gitlab::Redis::SharedState.with do |redis|
- redis.hexists(KEY, key(location_identifier, path))
+ def self.redis_key(location_identifier, object_storage_path)
+ [location_identifier, object_storage_path].join(':')
+ end
+
+ def self.count
+ with_redis do |redis|
+ redis.hlen(KEY)
end
end
- def self.complete(location_identifier, path)
- ::Gitlab::Redis::SharedState.with do |redis|
- redis.hdel(KEY, key(location_identifier, path))
+ def self.each
+ with_redis do |redis|
+ redis.hscan_each(KEY) do |entry|
+ redis_key, timestamp = entry
+ storage_location_identifier, object_storage_path = redis_key.split(':')
+
+ object = new(
+ redis_key: redis_key,
+ storage_location_identifier: storage_location_identifier,
+ object_storage_path: object_storage_path,
+ timestamp: timestamp
+ )
+
+ yield(object)
+ end
end
end
- def self.key(location_identifier, path)
- [location_identifier, path].join(':')
+ def self.with_redis(&block)
+ Gitlab::Redis::SharedState.with(&block) # rubocop:disable CodeReuse/ActiveRecord
end
+
+ def self.log_event(event, redis_key)
+ Gitlab::AppLogger.info(
+ message: "Pending direct upload #{event}",
+ redis_key: redis_key
+ )
+ end
+
+ def initialize(redis_key:, storage_location_identifier:, object_storage_path:, timestamp:)
+ @redis_key = redis_key
+ @storage_location_identifier = storage_location_identifier.to_sym
+ @object_storage_path = object_storage_path
+ @timestamp = timestamp.to_i
+ end
+
+ def stale?
+ timestamp < MAX_UPLOAD_DURATION.ago.utc.to_i
+ end
+
+ def delete
+ delete_object(object_storage_path)
+
+ self.class.with_redis do |redis|
+ redis.hdel(self.class::KEY, redis_key)
+ self.class.log_event(:deleted, redis_key)
+ end
+ end
+
+ private
+
+ attr_reader :redis_key, :storage_location_identifier, :object_storage_path, :timestamp
end
end
diff --git a/lib/sidebars/admin/menus/monitoring_menu.rb b/lib/sidebars/admin/menus/monitoring_menu.rb
index 2da56e87144..2cf21e1bf77 100644
--- a/lib/sidebars/admin/menus/monitoring_menu.rb
+++ b/lib/sidebars/admin/menus/monitoring_menu.rb
@@ -10,6 +10,7 @@ module Sidebars
add_item(background_migrations_menu_item)
add_item(background_jobs_menu_item)
add_item(health_check_menu_item)
+ add_item(metrics_dashboard_menu_item)
true
end
@@ -65,6 +66,18 @@ module Sidebars
item_id: :health_check
)
end
+
+ def metrics_dashboard_menu_item
+ return ::Sidebars::NilMenuItem.new(item_id: :metrics_dashboard) unless
+ Gitlab::CurrentSettings.current_application_settings.grafana_enabled?
+
+ ::Sidebars::MenuItem.new(
+ title: _('Metrics Dashboard'),
+ link: Gitlab::CurrentSettings.current_application_settings.grafana_url,
+ active_routes: { path: Gitlab::CurrentSettings.current_application_settings.grafana_url },
+ item_id: :metrics_dashboard
+ )
+ end
end
end
end
diff --git a/lib/tasks/gitlab/backup.rake b/lib/tasks/gitlab/backup.rake
index 22e1d903c8d..4143200ece4 100644
--- a/lib/tasks/gitlab/backup.rake
+++ b/lib/tasks/gitlab/backup.rake
@@ -206,6 +206,16 @@ namespace :gitlab do
Tasks::Gitlab::Backup.restore_task('packages')
end
end
+
+ namespace :ci_secure_files do
+ task create: :gitlab_environment do
+ Tasks::Gitlab::Backup.create_task('ci_secure_files')
+ end
+
+ task restore: :gitlab_environment do
+ Tasks::Gitlab::Backup.restore_task('ci_secure_files')
+ end
+ end
end
# namespace end: backup
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index d4d691c237c..bf30a4500a8 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -53000,6 +53000,9 @@ msgid_plural "checklist items"
msgstr[0] ""
msgstr[1] ""
+msgid "ci secure files"
+msgstr ""
+
msgid "ciReport|%{criticalStart}critical%{criticalEnd}, %{highStart}high%{highEnd} and %{otherStart}others%{otherEnd}"
msgstr ""
diff --git a/qa/Gemfile b/qa/Gemfile
index 81460c75440..d308b349216 100644
--- a/qa/Gemfile
+++ b/qa/Gemfile
@@ -40,7 +40,7 @@ gem 'chemlab', '~> 0.10'
gem 'chemlab-library-www-gitlab-com', '~> 0.1', '>= 0.1.1'
# dependencies for jenkins client
-gem 'nokogiri', '~> 1.15', '>= 1.15.1'
+gem 'nokogiri', '~> 1.15', '>= 1.15.2'
gem 'deprecation_toolkit', '~> 2.0.3', require: false
diff --git a/qa/Gemfile.lock b/qa/Gemfile.lock
index bee3ac39096..3b693e13d28 100644
--- a/qa/Gemfile.lock
+++ b/qa/Gemfile.lock
@@ -190,7 +190,7 @@ GEM
multi_json (1.15.0)
multi_xml (0.6.0)
netrc (0.11.0)
- nokogiri (1.15.1)
+ nokogiri (1.15.2)
mini_portile2 (~> 2.8.2)
racc (~> 1.4)
octokit (6.1.1)
@@ -331,7 +331,7 @@ DEPENDENCIES
gitlab_quality-test_tooling (~> 0.4.1)
influxdb-client (~> 2.9)
knapsack (~> 4.0)
- nokogiri (~> 1.15, >= 1.15.1)
+ nokogiri (~> 1.15, >= 1.15.2)
octokit (~> 6.1.1)
parallel (~> 1.23)
parallel_tests (~> 4.2, >= 4.2.1)
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index 97249d232dc..4083546a711 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -68,6 +68,8 @@ describe('Actions Notes Store', () => {
resetStore(store);
axiosMock.restore();
resetHTMLFixture();
+
+ window.gon = {};
});
describe('setNotesData', () => {
@@ -1467,6 +1469,29 @@ describe('Actions Notes Store', () => {
);
});
+ it('dispatches `fetchDiscussionsBatch` action with notes_filter 0 for merge request', () => {
+ window.gon = { features: { mrActivityFilters: true } };
+
+ return testAction(
+ actions.fetchDiscussions,
+ { path: 'test-path', filter: 'test-filter', persistFilter: 'test-persist-filter' },
+ { noteableType: notesConstants.MERGE_REQUEST_NOTEABLE_TYPE },
+ [],
+ [
+ {
+ type: 'fetchDiscussionsBatch',
+ payload: {
+ config: {
+ params: { notes_filter: 0, persist_filter: false },
+ },
+ path: 'test-path',
+ perPage: 20,
+ },
+ },
+ ],
+ );
+ });
+
it('dispatches `fetchDiscussionsBatch` action if noteable is an Issue', () => {
return testAction(
actions.fetchDiscussions,
diff --git a/spec/frontend/snippets/components/show_spec.js b/spec/frontend/snippets/components/show_spec.js
index 45a7c7b0b4a..4d079ee91ee 100644
--- a/spec/frontend/snippets/components/show_spec.js
+++ b/spec/frontend/snippets/components/show_spec.js
@@ -89,22 +89,32 @@ describe('Snippet view app', () => {
describe('Embed dropdown rendering', () => {
it.each`
- visibilityLevel | condition | isRendered
- ${VISIBILITY_LEVEL_INTERNAL_STRING} | ${'not render'} | ${false}
- ${VISIBILITY_LEVEL_PRIVATE_STRING} | ${'not render'} | ${false}
- ${'foo'} | ${'not render'} | ${false}
- ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${'render'} | ${true}
- `('does $condition embed-dropdown by default', ({ visibilityLevel, isRendered }) => {
- createComponent({
- data: {
- snippet: {
- visibilityLevel,
- webUrl,
+ snippetVisibility | projectVisibility | condition | isRendered
+ ${VISIBILITY_LEVEL_INTERNAL_STRING} | ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${'not render'} | ${false}
+ ${VISIBILITY_LEVEL_PRIVATE_STRING} | ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${'not render'} | ${false}
+ ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${undefined} | ${'render'} | ${true}
+ ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${'render'} | ${true}
+ ${VISIBILITY_LEVEL_INTERNAL_STRING} | ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${'not render'} | ${false}
+ ${VISIBILITY_LEVEL_PRIVATE_STRING} | ${undefined} | ${'not render'} | ${false}
+ ${'foo'} | ${undefined} | ${'not render'} | ${false}
+ ${VISIBILITY_LEVEL_PUBLIC_STRING} | ${VISIBILITY_LEVEL_PRIVATE_STRING} | ${'not render'} | ${false}
+ `(
+ 'does $condition embed-dropdown by default',
+ ({ snippetVisibility, projectVisibility, isRendered }) => {
+ createComponent({
+ data: {
+ snippet: {
+ visibilityLevel: snippetVisibility,
+ webUrl,
+ project: {
+ visibility: projectVisibility,
+ },
+ },
},
- },
- });
- expect(findEmbedDropdown().exists()).toBe(isRendered);
- });
+ });
+ expect(findEmbedDropdown().exists()).toBe(isRendered);
+ },
+ );
});
describe('hasUnretrievableBlobs alert rendering', () => {
diff --git a/spec/frontend/snippets/test_utils.js b/spec/frontend/snippets/test_utils.js
index dcef8fc9a8b..76b03c0aa0d 100644
--- a/spec/frontend/snippets/test_utils.js
+++ b/spec/frontend/snippets/test_utils.js
@@ -30,6 +30,7 @@ export const createGQLSnippet = () => ({
id: 'project-1',
fullPath: 'group/project',
webUrl: `${TEST_HOST}/group/project`,
+ visibility: 'public',
},
author: {
__typename: 'User',
diff --git a/spec/graphql/types/ci/catalog/resource_type_spec.rb b/spec/graphql/types/ci/catalog/resource_type_spec.rb
index 894522283cd..773be2e5b56 100644
--- a/spec/graphql/types/ci/catalog/resource_type_spec.rb
+++ b/spec/graphql/types/ci/catalog/resource_type_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Types::Ci::Catalog::ResourceType, feature_category: :pipeline_com
name
description
icon
+ web_path
versions
]
diff --git a/spec/graphql/types/notes/note_type_spec.rb b/spec/graphql/types/notes/note_type_spec.rb
index a9e45b29eea..d84530abbb3 100644
--- a/spec/graphql/types/notes/note_type_spec.rb
+++ b/spec/graphql/types/notes/note_type_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe GitlabSchema.types['Note'], feature_category: :team_planning do
author
body
body_html
+ award_emoji
confidential
internal
created_at
diff --git a/spec/lib/atlassian/jira_issue_key_extractor_spec.rb b/spec/lib/atlassian/jira_issue_key_extractor_spec.rb
index 48339d46153..d0499399746 100644
--- a/spec/lib/atlassian/jira_issue_key_extractor_spec.rb
+++ b/spec/lib/atlassian/jira_issue_key_extractor_spec.rb
@@ -41,5 +41,13 @@ RSpec.describe Atlassian::JiraIssueKeyExtractor, feature_category: :integrations
is_expected.to contain_exactly('TEST-01')
end
end
+
+ context 'with untrusted regex' do
+ subject { described_class.new('TEST-01 some A-100', custom_regex: Gitlab::UntrustedRegexp.new("[A-Z]{2,}-\\d+")).issue_keys }
+
+ it 'returns all valid Jira issue keys' do
+ is_expected.to contain_exactly('TEST-01')
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/bitbucket_import/importer_spec.rb b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
index 150ec36cd8d..4c94ecfe745 100644
--- a/spec/lib/gitlab/bitbucket_import/importer_spec.rb
+++ b/spec/lib/gitlab/bitbucket_import/importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :importers do
+RSpec.describe Gitlab::BitbucketImport::Importer, :clean_gitlab_redis_cache, feature_category: :importers do
include ImportSpecHelper
before do
@@ -258,6 +258,29 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :importers d
subject.execute
end
end
+
+ context 'when pull request was already imported' do
+ let(:pull_request_already_imported) do
+ instance_double(
+ BitbucketServer::Representation::PullRequest,
+ iid: 11)
+ end
+
+ let(:cache_key) do
+ format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :pull_requests)
+ end
+
+ before do
+ allow(subject.client).to receive(:pull_requests).and_return([pull_request, pull_request_already_imported])
+ Gitlab::Cache::Import::Caching.set_add(cache_key, pull_request_already_imported.iid)
+ end
+
+ it 'does not import the previously imported pull requests', :aggregate_failures do
+ expect { subject.execute }.to change { MergeRequest.count }.by(1)
+
+ expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, pull_request.iid)).to eq(true)
+ end
+ end
end
context 'issues statuses' do
@@ -428,6 +451,24 @@ RSpec.describe Gitlab::BitbucketImport::Importer, feature_category: :importers d
expect(importer.errors).to be_empty
end
end
+
+ context 'when issue was already imported' do
+ let(:cache_key) do
+ format(described_class::ALREADY_IMPORTED_CACHE_KEY, project: project.id, collection: :issues)
+ end
+
+ before do
+ Gitlab::Cache::Import::Caching.set_add(cache_key, sample_issues_statuses.first[:id])
+ end
+
+ it 'does not import previously imported issues', :aggregate_failures do
+ expect { subject.execute }.to change { Issue.count }.by(sample_issues_statuses.size - 1)
+
+ sample_issues_statuses.each do |sample_issues_status|
+ expect(Gitlab::Cache::Import::Caching.set_includes?(cache_key, sample_issues_status[:id])).to eq(true)
+ end
+ end
+ end
end
context 'metrics' do
diff --git a/spec/lib/gitlab/internal_events_spec.rb b/spec/lib/gitlab/internal_events_spec.rb
new file mode 100644
index 00000000000..f23979fc56a
--- /dev/null
+++ b/spec/lib/gitlab/internal_events_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Gitlab::InternalEvents, :snowplow, feature_category: :product_analytics do
+ include TrackingHelpers
+ include SnowplowHelpers
+
+ before do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event)
+ allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_snowplow)
+ allow(fake_snowplow).to receive(:event)
+ end
+
+ def expect_redis_hll_tracking(event_name)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to have_received(:track_event)
+ .with(event_name, anything)
+ end
+
+ def expect_snowplow_tracking(event_name)
+ service_ping_context = Gitlab::Tracking::ServicePingContext
+ .new(data_source: :redis_hll, event: event_name)
+ .to_context
+ .to_json
+
+ expect(SnowplowTracker::SelfDescribingJson).to have_received(:new)
+ .with(service_ping_context[:schema], service_ping_context[:data]).at_least(:once)
+
+ # Add test for creation of both contexts
+ contexts = [instance_of(SnowplowTracker::SelfDescribingJson), instance_of(SnowplowTracker::SelfDescribingJson)]
+
+ expect(fake_snowplow).to have_received(:event)
+ .with('InternalEventTracking', event_name, context: contexts)
+ end
+
+ let_it_be(:user) { build(:user) }
+ let_it_be(:project) { build(:project) }
+ let_it_be(:namespace) { project.namespace }
+
+ let(:fake_snowplow) { instance_double(Gitlab::Tracking::Destinations::Snowplow) }
+ let(:event_name) { 'g_edit_by_web_ide' }
+
+ it 'updates both RedisHLL and Snowplow', :aggregate_failures do
+ params = { user_id: user.id, project_id: project.id, namespace_id: namespace.id }
+ described_class.track_event(event_name, **params)
+
+ expect_redis_hll_tracking(event_name)
+ expect_snowplow_tracking(event_name) # Add test for arguments
+ end
+
+ it 'rescues error' do
+ params = { user_id: user.id, project_id: project.id, namespace_id: namespace.id }
+ error = StandardError.new("something went wrong")
+ allow(fake_snowplow).to receive(:event).and_raise(error)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+ .with(
+ error,
+ snowplow_category: 'InternalEventTracking',
+ snowplow_action: event_name
+ )
+
+ expect { described_class.track_event(event_name, **params) }.not_to raise_error
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 4b589dc43af..96897cea9c0 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -424,6 +424,21 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
end
+
+ context 'when the job is deferred' do
+ it 'logs start and end of job with deferred job_status' do
+ travel_to(timestamp) do
+ expect(logger).to receive(:info).with(start_payload).ordered
+ expect(logger).to receive(:info).with(deferred_payload).ordered
+ expect(subject).to receive(:log_job_start).and_call_original
+ expect(subject).to receive(:log_job_done).and_call_original
+
+ call_subject(job, 'test_queue') do
+ job['deferred'] = true
+ end
+ end
+ end
+ end
end
describe '#add_time_keys!' do
diff --git a/spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb b/spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb
index 16c0fceae3b..6dcf9aaeb63 100644
--- a/spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/defer_jobs_spec.rb
@@ -40,14 +40,15 @@ RSpec.describe Gitlab::SidekiqMiddleware::DeferJobs, feature_category: :scalabil
context 'for the affected worker' do
it 'defers the job' do
expect(TestDeferredWorker).to receive(:perform_in).with(described_class::DELAY, *job['args'])
- expect(Sidekiq.logger).to receive(:info).with(
- class: TestDeferredWorker.name,
- job_id: job['jid'],
- message: "Deferring #{TestDeferredWorker.name} for #{described_class::DELAY} s with arguments " \
- "(#{job['args'].inspect})"
- )
expect { |b| subject.call(TestDeferredWorker.new, job, queue, &b) }.not_to yield_control
end
+
+ it 'increments the counter' do
+ subject.call(TestDeferredWorker.new, job, queue)
+
+ counter = ::Gitlab::Metrics.registry.get(:sidekiq_jobs_deferred_total)
+ expect(counter.get({ worker: "TestDeferredWorker" })).to eq(1)
+ end
end
context 'for other workers' do
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index a353a3a512c..f3e27c72143 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -267,7 +267,7 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
allow(YAML).to receive(:load_file).with(Rails.root.join('config/events/filename.yml')).and_return(test_definition)
end
- it 'dispatchs the data to .event' do
+ it 'dispatches the data to .event' do
project = build_stubbed(:project)
user = build_stubbed(:user)
@@ -317,4 +317,18 @@ RSpec.describe Gitlab::Tracking, feature_category: :application_instrumentation
expect(described_class).not_to be_snowplow_micro_enabled
end
end
+
+ describe 'tracker' do
+ it 'returns a SnowPlowMicro instance in development' do
+ allow(Rails.env).to receive(:development?).and_return(true)
+
+ expect(described_class.tracker).to be_an_instance_of(Gitlab::Tracking::Destinations::SnowplowMicro)
+ end
+
+ it 'returns a SnowPlow instance when not in development' do
+ allow(Rails.env).to receive(:development?).and_return(false)
+
+ expect(described_class.tracker).to be_an_instance_of(Gitlab::Tracking::Destinations::Snowplow)
+ end
+ end
end
diff --git a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
index e41da6d9ea2..25c57aa00c6 100644
--- a/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/merge_request_activity_unique_counter_spec.rb
@@ -54,6 +54,11 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:merge_request) { create(:merge_request) }
let(:target_project) { merge_request.target_project }
+ let(:fake_tracker) { instance_spy(Gitlab::Tracking::Destinations::Snowplow) }
+
+ before do
+ allow(Gitlab::Tracking).to receive(:tracker).and_return(fake_tracker)
+ end
it_behaves_like 'a tracked merge request unique event' do
let(:action) { described_class::MR_USER_CREATE_ACTION }
@@ -63,14 +68,36 @@ RSpec.describe Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter, :cl
let(:action) { described_class::MR_CREATE_ACTION }
end
- it_behaves_like 'Snowplow event tracking with RedisHLL context' do
- let(:action) { :create }
- let(:category) { described_class.name }
- let(:project) { target_project }
- let(:namespace) { project.namespace.reload }
- let(:user) { project.creator }
- let(:label) { 'redis_hll_counters.code_review.i_code_review_user_create_mr_monthly' }
- let(:property) { described_class::MR_USER_CREATE_ACTION }
+ it 'logs to Snowplow', :aggregate_failures do
+ # This logic should be extracted to shared_examples
+ namespace = target_project.namespace
+
+ expect(Gitlab::Tracking::StandardContext)
+ .to receive(:new)
+ .with(
+ project_id: target_project.id,
+ user_id: user.id,
+ namespace_id: namespace.id,
+ plan_name: namespace.actual_plan_name
+ )
+ .and_call_original
+
+ expect(Gitlab::Tracking::ServicePingContext)
+ .to receive(:new)
+ .with(data_source: :redis_hll, event: described_class::MR_USER_CREATE_ACTION)
+ .and_call_original
+
+ expect(fake_tracker).to receive(:event)
+ .with(
+ 'InternalEventTracking',
+ described_class::MR_USER_CREATE_ACTION,
+ context: [
+ an_instance_of(SnowplowTracker::SelfDescribingJson),
+ an_instance_of(SnowplowTracker::SelfDescribingJson)
+ ]
+ )
+ .exactly(:once)
+ subject
end
end
diff --git a/spec/lib/object_storage/fog_helpers_spec.rb b/spec/lib/object_storage/fog_helpers_spec.rb
new file mode 100644
index 00000000000..2ad1ac22359
--- /dev/null
+++ b/spec/lib/object_storage/fog_helpers_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+module Dummy
+ class Implementation
+ include ObjectStorage::FogHelpers
+
+ def storage_location_identifier
+ :artifacts
+ end
+ end
+
+ class WrongImplementation
+ include ObjectStorage::FogHelpers
+ end
+end
+
+RSpec.describe ObjectStorage::FogHelpers, feature_category: :shared do
+ let(:implementation_class) { Dummy::Implementation }
+
+ subject { implementation_class.new.available? }
+
+ before do
+ stub_artifacts_object_storage(enabled: true)
+ end
+
+ describe '#available?' do
+ context 'when object storage is enabled' do
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when object storage is disabled' do
+ before do
+ stub_artifacts_object_storage(enabled: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'when implementing class did not define storage_location_identifier' do
+ let(:implementation_class) { Dummy::WrongImplementation }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(NotImplementedError)
+ end
+ end
+ end
+end
diff --git a/spec/lib/object_storage/pending_direct_upload_spec.rb b/spec/lib/object_storage/pending_direct_upload_spec.rb
index af08b9c8188..7acd599ed9f 100644
--- a/spec/lib/object_storage/pending_direct_upload_spec.rb
+++ b/spec/lib/object_storage/pending_direct_upload_spec.rb
@@ -2,23 +2,38 @@
require 'spec_helper'
-RSpec.describe ObjectStorage::PendingDirectUpload, :clean_gitlab_redis_shared_state, feature_category: :shared do
+RSpec.describe ObjectStorage::PendingDirectUpload, :direct_uploads, :clean_gitlab_redis_shared_state, feature_category: :shared do
let(:location_identifier) { :artifacts }
let(:path) { 'some/path/123' }
describe '.prepare' do
it 'creates a redis entry for the given location identifier and path' do
+ redis_key = described_class.redis_key(location_identifier, path)
+
+ expect_to_log(:prepared, redis_key)
+
freeze_time do
described_class.prepare(location_identifier, path)
::Gitlab::Redis::SharedState.with do |redis|
- key = described_class.key(location_identifier, path)
- expect(redis.hget('pending_direct_uploads', key)).to eq(Time.current.utc.to_i.to_s)
+ expect(redis.hget('pending_direct_uploads', redis_key)).to eq(Time.current.utc.to_i.to_s)
end
end
end
end
+ describe '.count' do
+ subject { described_class.count }
+
+ before do
+ described_class.prepare(:artifacts, 'some/path')
+ described_class.prepare(:uploads, 'some/other/path')
+ described_class.prepare(:artifacts, 'some/new/path')
+ end
+
+ it { is_expected.to eq(3) }
+ end
+
describe '.exists?' do
let(:path) { 'some/path/123' }
@@ -56,15 +71,101 @@ RSpec.describe ObjectStorage::PendingDirectUpload, :clean_gitlab_redis_shared_st
expect(described_class.exists?(location_identifier, path)).to eq(true)
+ redis_key = described_class.redis_key(location_identifier, path)
+
+ expect_to_log(:completed, redis_key)
+
described_class.complete(location_identifier, path)
expect(described_class.exists?(location_identifier, path)).to eq(false)
end
end
- describe '.key' do
- subject { described_class.key(location_identifier, path) }
+ describe '.redis_key' do
+ subject { described_class.redis_key(location_identifier, path) }
it { is_expected.to eq("#{location_identifier}:#{path}") }
end
+
+ describe '.each' do
+ before do
+ described_class.prepare(:artifacts, 'some/path')
+ described_class.prepare(:uploads, 'some/other/path')
+ described_class.prepare(:artifacts, 'some/new/path')
+ end
+
+ it 'yields each pending direct upload object' do
+ expect { |b| described_class.each(&b) }.to yield_control.exactly(3).times
+ end
+ end
+
+ describe '#stale?' do
+ let(:pending_direct_upload) do
+ described_class.new(
+ redis_key: 'artifacts:some/path',
+ storage_location_identifier: 'artifacts',
+ object_storage_path: 'some/path',
+ timestamp: timestamp
+ )
+ end
+
+ subject { pending_direct_upload.stale? }
+
+ context 'when timestamp is older than 3 hours ago' do
+ let(:timestamp) { 4.hours.ago.utc.to_i }
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when timestamp is not older than 3 hours ago' do
+ let(:timestamp) { 2.hours.ago.utc.to_i }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#delete' do
+ let(:object_storage_path) { 'some/path' }
+ let(:pending_direct_upload) do
+ described_class.new(
+ redis_key: 'artifacts:some/path',
+ storage_location_identifier: location_identifier,
+ object_storage_path: object_storage_path,
+ timestamp: 4.hours.ago
+ )
+ end
+
+ let(:location_identifier) { JobArtifactUploader.storage_location_identifier }
+ let(:fog_connection) { stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true) }
+
+ before do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: object_storage_path,
+ body: 'something'
+ )
+
+ prepare_pending_direct_upload(object_storage_path, 4.hours.ago)
+ end
+
+ it 'deletes the object from storage and also the redis entry' do
+ redis_key = described_class.redis_key(location_identifier, object_storage_path)
+
+ expect_to_log(:deleted, redis_key)
+
+ expect { pending_direct_upload.delete }.to change { total_pending_direct_uploads }.by(-1)
+
+ expect_not_to_have_pending_direct_upload(object_storage_path)
+ expect_pending_uploaded_object_not_to_exist(object_storage_path)
+ end
+ end
+
+ def expect_to_log(event, redis_key)
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ message: "Pending direct upload #{event}",
+ redis_key: redis_key
+ )
+ end
end
diff --git a/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb b/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb
index 0483159da7a..3bf43b9a251 100644
--- a/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb
+++ b/spec/lib/sidebars/admin/menus/monitoring_menu_spec.rb
@@ -3,10 +3,38 @@
require 'spec_helper'
RSpec.describe Sidebars::Admin::Menus::MonitoringMenu, feature_category: :navigation do
+ let_it_be(:user) { create(:user, :admin) }
+ let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
+ let(:menu) { described_class.new(context) }
+
it_behaves_like 'Admin menu',
link: '/admin/system_info',
title: s_('Admin|Monitoring'),
icon: 'monitor'
it_behaves_like 'Admin menu with sub menus'
+
+ describe 'Menu items' do
+ subject { described_class.new(context).renderable_items.index { |e| e.item_id == item_id } }
+
+ describe 'Metrics Dashboard' do
+ let(:item_id) { :metrics_dashboard }
+
+ before do
+ stub_application_setting(grafana_enabled: grafana_enabled)
+ end
+
+ context 'when grafana is enabled' do
+ let(:grafana_enabled) { true }
+
+ specify { is_expected.not_to be_nil }
+ end
+
+ context 'when grafana is disabled' do
+ let(:grafana_enabled) { false }
+
+ specify { is_expected.to be_nil }
+ end
+ end
+ end
end
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index 8b8ffb9d23c..236b185fca1 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -253,4 +253,10 @@ RSpec.describe PlanLimits do
expect(attributes).to all(include(be_zero))
end
end
+
+ describe '#dashboard_storage_limit_enabled?' do
+ it 'returns false' do
+ expect(plan_limits.dashboard_storage_limit_enabled?).to be false
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project/work_items_spec.rb b/spec/requests/api/graphql/project/work_items_spec.rb
index 628a2117e9d..478112b687a 100644
--- a/spec/requests/api/graphql/project/work_items_spec.rb
+++ b/spec/requests/api/graphql/project/work_items_spec.rb
@@ -288,60 +288,6 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
end
end
- describe 'fetching work item notes widget' do
- let(:item_filter_params) { { iid: item2.iid.to_s } }
- let(:fields) do
- <<~GRAPHQL
- edges {
- node {
- widgets {
- type
- ... on WorkItemWidgetNotes {
- system: discussions(filter: ONLY_ACTIVITY, first: 10) { nodes { id notes { nodes { id system internal body } } } },
- comments: discussions(filter: ONLY_COMMENTS, first: 10) { nodes { id notes { nodes { id system internal body } } } },
- all_notes: discussions(filter: ALL_NOTES, first: 10) { nodes { id notes { nodes { id system internal body } } } }
- }
- }
- }
- }
- GRAPHQL
- end
-
- before_all do
- create_notes(item1, "some note1")
- create_notes(item2, "some note2")
- end
-
- shared_examples 'fetches work item notes' do |user_comments_count:, system_notes_count:|
- it "fetches notes" do
- post_graphql(query, current_user: current_user)
-
- all_widgets = graphql_dig_at(items_data, :node, :widgets)
- notes_widget = all_widgets.find { |x| x["type"] == "NOTES" }
-
- all_notes = graphql_dig_at(notes_widget["all_notes"], :nodes)
- system_notes = graphql_dig_at(notes_widget["system"], :nodes)
- comments = graphql_dig_at(notes_widget["comments"], :nodes)
-
- expect(comments.count).to eq(user_comments_count)
- expect(system_notes.count).to eq(system_notes_count)
- expect(all_notes.count).to eq(user_comments_count + system_notes_count)
- end
- end
-
- context 'when user has permission to view internal notes' do
- before do
- project.add_developer(current_user)
- end
-
- it_behaves_like 'fetches work item notes', user_comments_count: 2, system_notes_count: 5
- end
-
- context 'when user cannot view internal notes' do
- it_behaves_like 'fetches work item notes', user_comments_count: 1, system_notes_count: 5
- end
- end
-
context 'when fetching work item notifications widget' do
let(:fields) do
<<~GRAPHQL
@@ -426,26 +372,4 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
query_graphql_field('workItems', params, fields)
)
end
-
- def create_notes(work_item, note_body)
- create(:note, system: true, project: work_item.project, noteable: work_item)
-
- disc_start = create(:discussion_note_on_issue, noteable: work_item, project: work_item.project, note: note_body)
- create(:note,
- discussion_id: disc_start.discussion_id, noteable: work_item,
- project: work_item.project, note: "reply on #{note_body}")
-
- create(:resource_label_event, user: current_user, issue: work_item, label: label1, action: 'add')
- create(:resource_label_event, user: current_user, issue: work_item, label: label1, action: 'remove')
-
- create(:resource_milestone_event, issue: work_item, milestone: milestone1, action: 'add')
- create(:resource_milestone_event, issue: work_item, milestone: milestone1, action: 'remove')
-
- # confidential notes are currently available only on issues and epics
- conf_disc_start = create(:discussion_note_on_issue, :confidential,
- noteable: work_item, project: work_item.project, note: "confidential #{note_body}")
- create(:note, :confidential,
- discussion_id: conf_disc_start.discussion_id, noteable: work_item,
- project: work_item.project, note: "reply on confidential #{note_body}")
- end
end
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index f315501f0fa..f147240e1f2 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -541,6 +541,95 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
end
end
+ describe 'notes widget' do
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetNotes {
+ system: discussions(filter: ONLY_ACTIVITY, first: 10) { nodes { id notes { nodes { id system internal body } } } },
+ comments: discussions(filter: ONLY_COMMENTS, first: 10) { nodes { id notes { nodes { id system internal body } } } },
+ all_notes: discussions(filter: ALL_NOTES, first: 10) { nodes { id notes { nodes { id system internal body } } } }
+ }
+ }
+ GRAPHQL
+ end
+
+ context 'when fetching award emoji from notes' do
+ let(:work_item_fields) do
+ <<~GRAPHQL
+ id
+ widgets {
+ type
+ ... on WorkItemWidgetNotes {
+ discussions(filter: ONLY_COMMENTS, first: 10) {
+ nodes {
+ id
+ notes {
+ nodes {
+ id
+ body
+ awardEmoji {
+ nodes {
+ name
+ user {
+ name
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ GRAPHQL
+ end
+
+ let_it_be(:note) { create(:note, project: work_item.project, noteable: work_item) }
+
+ before_all do
+ create(:award_emoji, awardable: note, name: 'rocket', user: developer)
+ end
+
+ it 'returns award emoji data' do
+ all_widgets = graphql_dig_at(work_item_data, :widgets)
+ notes_widget = all_widgets.find { |x| x['type'] == 'NOTES' }
+ notes = graphql_dig_at(notes_widget['discussions'], :nodes).flat_map { |d| d['notes']['nodes'] }
+
+ note_with_emoji = notes.find { |n| n['id'] == note.to_gid.to_s }
+
+ expect(note_with_emoji).to include(
+ 'awardEmoji' => {
+ 'nodes' => include(
+ hash_including(
+ 'name' => 'rocket',
+ 'user' => {
+ 'name' => developer.name
+ }
+ )
+ )
+ }
+ )
+ end
+
+ it 'avoids N+1 queries' do
+ post_graphql(query, current_user: developer)
+
+ control = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: developer) }
+
+ expect_graphql_errors_to_be_empty
+
+ another_note = create(:note, project: work_item.project, noteable: work_item)
+ create(:award_emoji, awardable: another_note, name: 'star', user: guest)
+
+ expect { post_graphql(query, current_user: developer) }.not_to exceed_query_limit(control)
+ expect_graphql_errors_to_be_empty
+ end
+ end
+ end
+
context 'when an Issue Global ID is provided' do
let(:global_id) { Issue.find(work_item.id).to_gid.to_s }
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index 0ad9b3c82ab..f796edfb20e 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe API::Namespaces, :aggregate_failures, feature_category: :groups_a
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(group_kind_json_response.keys).to include('id', 'kind', 'name', 'path', 'full_path',
- 'parent_id', 'members_count_with_descendants')
+ 'parent_id', 'members_count_with_descendants', 'root_repository_size')
expect(user_kind_json_response.keys).to include('id', 'kind', 'name', 'path', 'full_path', 'parent_id')
end
@@ -66,7 +66,7 @@ RSpec.describe API::Namespaces, :aggregate_failures, feature_category: :groups_a
owned_group_response = json_response.find { |resource| resource['id'] == group1.id }
expect(owned_group_response.keys).to include('id', 'kind', 'name', 'path', 'full_path',
- 'parent_id', 'members_count_with_descendants')
+ 'parent_id', 'members_count_with_descendants', 'root_repository_size')
end
it "returns correct attributes when user cannot admin group" do
diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb
index 50a3d49d4a3..7255d19ef8a 100644
--- a/spec/services/merge_requests/after_create_service_spec.rb
+++ b/spec/services/merge_requests/after_create_service_spec.rb
@@ -231,5 +231,30 @@ RSpec.describe MergeRequests::AfterCreateService, feature_category: :code_review
expect(service).to have_received(:execute).with(merge_request)
end
+
+ describe 'logging' do
+ it 'logs specific events' do
+ ::Gitlab::ApplicationContext.push(caller_id: 'NewMergeRequestWorker')
+
+ allow(Gitlab::AppLogger).to receive(:info).and_call_original
+
+ [
+ 'Executing hooks',
+ 'Executed hooks',
+ 'Creating pipeline',
+ 'Pipeline created'
+ ].each do |message|
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ 'meta.caller_id' => 'NewMergeRequestWorker',
+ message: message,
+ merge_request_id: merge_request.id
+ )
+ ).and_call_original
+ end
+
+ execute_service
+ end
+ end
end
end
diff --git a/spec/services/object_storage/delete_stale_direct_uploads_service_spec.rb b/spec/services/object_storage/delete_stale_direct_uploads_service_spec.rb
new file mode 100644
index 00000000000..e44d57e9bb5
--- /dev/null
+++ b/spec/services/object_storage/delete_stale_direct_uploads_service_spec.rb
@@ -0,0 +1,108 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ObjectStorage::DeleteStaleDirectUploadsService, :direct_uploads, :clean_gitlab_redis_shared_state, feature_category: :shared do
+ let(:service) { described_class.new }
+
+ describe '#execute', :aggregate_failures do
+ subject(:execute_result) { service.execute }
+
+ let(:location_identifier) { JobArtifactUploader.storage_location_identifier }
+ let(:fog_connection) { stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true) }
+
+ let(:stale_path_1) { 'stale/path/123' }
+ let!(:stale_object_1) do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: stale_path_1,
+ body: 'something'
+ )
+ end
+
+ let(:stale_path_2) { 'stale/path/456' }
+ let!(:stale_object_2) do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: stale_path_2,
+ body: 'something'
+ )
+ end
+
+ let(:non_stale_path) { 'nonstale/path/123' }
+ let!(:non_stale_object) do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: non_stale_path,
+ body: 'something'
+ )
+ end
+
+ it 'only deletes stale entries', :aggregate_failures do
+ prepare_pending_direct_upload(stale_path_1, 5.hours.ago)
+ prepare_pending_direct_upload(stale_path_2, 4.hours.ago)
+ prepare_pending_direct_upload(non_stale_path, 3.minutes.ago)
+
+ expect(execute_result).to eq(
+ status: :success,
+ total_pending_entries: 3,
+ total_deleted_stale_entries: 2,
+ execution_timeout: false
+ )
+
+ expect_not_to_have_pending_direct_upload(stale_path_1)
+ expect_pending_uploaded_object_not_to_exist(stale_path_1)
+
+ expect_not_to_have_pending_direct_upload(stale_path_2)
+ expect_pending_uploaded_object_not_to_exist(stale_path_2)
+
+ expect_to_have_pending_direct_upload(non_stale_path)
+ expect_pending_uploaded_object_to_exist(non_stale_path)
+ end
+
+ context 'when a stale entry does not have a matching object in the storage' do
+ it 'does not fail and still removes the stale entry' do
+ stale_no_object_path = 'some/other/path'
+ prepare_pending_direct_upload(stale_path_1, 5.hours.ago)
+ prepare_pending_direct_upload(stale_no_object_path, 5.hours.ago)
+
+ expect(execute_result[:status]).to eq(:success)
+
+ expect_not_to_have_pending_direct_upload(stale_path_1)
+ expect_pending_uploaded_object_not_to_exist(stale_path_1)
+
+ expect_not_to_have_pending_direct_upload(stale_no_object_path)
+ end
+ end
+
+ context 'when timeout happens' do
+ before do
+ stub_const("#{described_class}::MAX_EXEC_DURATION", 0.seconds)
+
+ prepare_pending_direct_upload(stale_path_1, 5.hours.ago)
+ prepare_pending_direct_upload(stale_path_2, 4.hours.ago)
+ end
+
+ it 'completes the current iteration and reports information about total entries' do
+ expect(execute_result).to eq(
+ status: :success,
+ total_pending_entries: 2,
+ total_deleted_stale_entries: 1,
+ execution_timeout: true
+ )
+
+ expect_not_to_have_pending_direct_upload(stale_path_1)
+ expect_pending_uploaded_object_not_to_exist(stale_path_1)
+
+ expect_to_have_pending_direct_upload(stale_path_2)
+ expect_pending_uploaded_object_to_exist(stale_path_2)
+ end
+ end
+ end
+end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index e52a1f1c56d..0d97af0df0f 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -181,6 +181,7 @@ RSpec.configure do |config|
config.include RequestUrgencyMatcher, type: :controller
config.include RequestUrgencyMatcher, type: :request
config.include Capybara::RSpecMatchers, type: :request
+ config.include PendingDirectUploadHelpers, :direct_uploads
config.include_context 'when rendered has no HTML escapes', type: :view
diff --git a/spec/support/helpers/pending_direct_uploads_helpers.rb b/spec/support/helpers/pending_direct_uploads_helpers.rb
new file mode 100644
index 00000000000..2c74521b442
--- /dev/null
+++ b/spec/support/helpers/pending_direct_uploads_helpers.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+module PendingDirectUploadHelpers
+ def prepare_pending_direct_upload(path, time)
+ travel_to time do
+ ObjectStorage::PendingDirectUpload.prepare(
+ location_identifier,
+ path
+ )
+ end
+ end
+
+ def expect_to_have_pending_direct_upload(path)
+ expect(ObjectStorage::PendingDirectUpload.exists?(location_identifier, path)).to eq(true)
+ end
+
+ def expect_not_to_have_pending_direct_upload(path)
+ expect(ObjectStorage::PendingDirectUpload.exists?(location_identifier, path)).to eq(false)
+ end
+
+ def expect_pending_uploaded_object_not_to_exist(path)
+ expect { fog_connection.get_object(location_identifier.to_s, path) }.to raise_error(Excon::Error::NotFound)
+ end
+
+ def expect_pending_uploaded_object_to_exist(path)
+ expect { fog_connection.get_object(location_identifier.to_s, path) }.not_to raise_error
+ end
+
+ def total_pending_direct_uploads
+ ObjectStorage::PendingDirectUpload.with_redis do |redis|
+ redis.hlen(ObjectStorage::PendingDirectUpload::KEY)
+ end
+ end
+end
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 96abf527d85..c45b48e27f8 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -165,6 +165,7 @@ module TestEnv
FileUtils.mkdir_p(lfs_path)
FileUtils.mkdir_p(terraform_state_path)
FileUtils.mkdir_p(packages_path)
+ FileUtils.mkdir_p(ci_secure_files_path)
end
def setup_gitlab_shell
@@ -343,6 +344,10 @@ module TestEnv
Gitlab.config.packages.storage_path
end
+ def ci_secure_files_path
+ Gitlab.config.ci_secure_files.storage_path
+ end
+
# When no cached assets exist, manually hit the root path to create them
#
# Otherwise they'd be created by the first test, often timing out and
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
index a3c688bb69e..88339df3475 100644
--- a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
@@ -65,6 +65,13 @@ RSpec.shared_context 'structured_logger' do
)
end
+ let(:deferred_payload) do
+ end_payload.merge(
+ 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: deferred: 0.0 sec',
+ 'job_status' => 'deferred'
+ )
+ end
+
let(:exception_payload) do
end_payload.merge(
'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
diff --git a/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb b/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
index 99d122e8254..4a33ace340b 100644
--- a/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/notes_on_noteables_shared_examples.rb
@@ -20,6 +20,14 @@ RSpec.shared_context 'exposing regular notes on a noteable in GraphQL' do
edges {
node {
#{all_graphql_fields_for('Note', max_depth: 1)}
+ awardEmoji {
+ nodes {
+ name
+ user {
+ name
+ }
+ }
+ }
}
}
}
@@ -40,6 +48,22 @@ RSpec.shared_context 'exposing regular notes on a noteable in GraphQL' do
expect(noteable_data['notes']['edges'].first['node']['body'])
.to eq(note.note)
end
+
+ it 'avoids N+1 queries' do
+ create(:award_emoji, awardable: note, name: 'star', user: user)
+
+ post_graphql(query, current_user: user)
+
+ control = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: user) }
+
+ expect_graphql_errors_to_be_empty
+
+ another_note = create(:note, project: note.project, noteable: noteable, author: user)
+ create(:award_emoji, awardable: another_note, name: 'star', user: user)
+
+ expect { post_graphql(query, current_user: user) }.not_to exceed_query_limit(control)
+ expect_graphql_errors_to_be_empty
+ end
end
context "for discussions" do
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 7113818ed34..c0da1b5d2d1 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -5,9 +5,12 @@ require 'rake_helper'
RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category: :backup_restore do
let(:enable_registry) { true }
let(:backup_restore_pid_path) { "#{Rails.application.root}/tmp/backup_restore.pid" }
- let(:backup_tasks) { %w[db repo uploads builds artifacts pages lfs terraform_state registry packages] }
+ let(:backup_tasks) do
+ %w[db repo uploads builds artifacts pages lfs terraform_state registry packages ci_secure_files]
+ end
+
let(:backup_types) do
- %w[db repositories uploads builds artifacts pages lfs terraform_state registry packages]
+ %w[db repositories uploads builds artifacts pages lfs terraform_state registry packages ci_secure_files]
end
def tars_glob
@@ -27,6 +30,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
terraform_state.tar.gz
pages.tar.gz
packages.tar.gz
+ ci_secure_files.tar.gz
]
end
@@ -315,6 +319,8 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping container registry images ... done")
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping packages ... ")
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping packages ... done")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping ci secure files ... ")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping ci secure files ... done")
backup_tasks.each do |task|
run_rake_task("gitlab:backup:#{task}:create")
@@ -391,6 +397,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
terraform_state.tar.gz
registry.tar.gz
packages.tar.gz
+ ci_secure_files.tar.gz
]
)
@@ -405,6 +412,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
expect(tar_contents).to match('terraform_state.tar.gz')
expect(tar_contents).to match('registry.tar.gz')
expect(tar_contents).to match('packages.tar.gz')
+ expect(tar_contents).to match('ci_secure_files.tar.gz')
expect(tar_contents).not_to match(%r{^.{4,9}[rwx].* (database.sql.gz|uploads.tar.gz|repositories|builds.tar.gz|
pages.tar.gz|artifacts.tar.gz|registry.tar.gz)/$})
end
@@ -612,6 +620,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
terraform_state.tar.gz
registry.tar.gz
packages.tar.gz
+ ci_secure_files.tar.gz
]
)
@@ -624,6 +633,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
expect(tar_contents).to match('pages.tar.gz')
expect(tar_contents).to match('registry.tar.gz')
expect(tar_contents).to match('packages.tar.gz')
+ expect(tar_contents).to match('ci_secure_files.tar.gz')
expect(tar_contents).not_to match('repositories/')
expect(tar_contents).to match('repositories: Not found in archive')
end
@@ -668,7 +678,8 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
'pages.tar.gz',
'registry.tar.gz',
'packages.tar.gz',
- 'repositories'
+ 'repositories',
+ 'ci_secure_files.tar.gz'
)
end
diff --git a/spec/workers/object_storage/delete_stale_direct_uploads_worker_spec.rb b/spec/workers/object_storage/delete_stale_direct_uploads_worker_spec.rb
new file mode 100644
index 00000000000..a93f9adc0ee
--- /dev/null
+++ b/spec/workers/object_storage/delete_stale_direct_uploads_worker_spec.rb
@@ -0,0 +1,72 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ObjectStorage::DeleteStaleDirectUploadsWorker, :direct_uploads, :clean_gitlab_redis_shared_state, feature_category: :shared do
+ let(:worker) { described_class.new }
+
+ describe '#perform' do
+ it 'executes a service' do
+ expect_next_instance_of(ObjectStorage::DeleteStaleDirectUploadsService) do |instance|
+ expect(instance).to receive(:execute).and_call_original
+ end
+
+ worker.perform
+ end
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let(:location_identifier) { JobArtifactUploader.storage_location_identifier }
+ let(:fog_connection) { stub_artifacts_object_storage(JobArtifactUploader, direct_upload: true) }
+
+ let(:stale_remote_path) { 'stale/path/123' }
+ let!(:stale_object) do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: stale_remote_path,
+ body: 'something'
+ )
+ end
+
+ let(:non_stale_remote_path) { 'nonstale/path/123' }
+ let!(:non_stale_object) do
+ fog_connection.directories
+ .new(key: location_identifier.to_s)
+ .files
+ .create( # rubocop:disable Rails/SaveBang
+ key: non_stale_remote_path,
+ body: 'something'
+ )
+ end
+
+ it 'only deletes stale entries', :aggregate_failures do
+ prepare_pending_direct_upload(stale_remote_path, 4.hours.ago)
+ prepare_pending_direct_upload(non_stale_remote_path, 3.minutes.ago)
+
+ subject
+
+ expect_not_to_have_pending_direct_upload(stale_remote_path)
+ expect_pending_uploaded_object_not_to_exist(stale_remote_path)
+
+ expect_to_have_pending_direct_upload(non_stale_remote_path)
+ expect_pending_uploaded_object_to_exist(non_stale_remote_path)
+ end
+
+ context 'when stale_pending_direct_uploads_cleaner feature flag is disabled' do
+ before do
+ stub_feature_flags(stale_pending_direct_uploads_cleaner: false)
+ end
+
+ it 'does nothing' do
+ prepare_pending_direct_upload(stale_remote_path, 4.hours.ago)
+
+ subject
+
+ expect_to_have_pending_direct_upload(stale_remote_path)
+ expect_pending_uploaded_object_to_exist(stale_remote_path)
+ end
+ end
+ end
+end