Welcome to mirror list, hosted at ThFree Co, Russian Federation.

gitlab.com/gitlab-org/gitlab-foss.git - Unnamed repository; edit this file 'description' to name the repository.
summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorGitLab Bot <gitlab-bot@gitlab.com>2023-08-15 03:10:24 +0300
committerGitLab Bot <gitlab-bot@gitlab.com>2023-08-15 03:10:24 +0300
commit3677bb721df3c9ae898b6665a8b2ae0b95a9d62f (patch)
tree7744275e960ec9e6d80d8ae400f0c6e75d2d2469
parent490269f098a972406c002c57ac0863c8521bc679 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--app/models/project_statistics.rb17
-rw-r--r--db/post_migrate/20230809133249_index_sbom_occurrences_on_project_id_component_id_and_input_file_path.rb15
-rw-r--r--db/schema_migrations/202308091332491
-rw-r--r--db/structure.sql2
-rw-r--r--doc/administration/dedicated/index.md2
-rw-r--r--doc/administration/file_hooks.md19
-rw-r--r--doc/administration/settings/account_and_limit_settings.md2
-rw-r--r--doc/update/index.md12
-rw-r--r--doc/update/versions/gitlab_16_changes.md65
-rw-r--r--doc/user/application_security/policies/scan-result-policies.md14
-rw-r--r--spec/models/namespace/root_storage_statistics_spec.rb31
-rw-r--r--spec/models/project_statistics_spec.rb119
12 files changed, 183 insertions, 116 deletions
diff --git a/app/models/project_statistics.rb b/app/models/project_statistics.rb
index 18bbfc894e7..856b77b9da7 100644
--- a/app/models/project_statistics.rb
+++ b/app/models/project_statistics.rb
@@ -19,7 +19,7 @@ class ProjectStatistics < ApplicationRecord
Namespaces::ScheduleAggregationWorker.perform_async(project_statistics.namespace_id)
end
- before_save :update_storage_size
+ after_commit :refresh_storage_size!, on: :update, if: -> { storage_size_components_changed? }
COLUMNS_TO_REFRESH = [:repository_size, :wiki_size, :lfs_objects_size, :commit_count, :snippets_size, :uploads_size, :container_registry_size].freeze
INCREMENTABLE_COLUMNS = [
@@ -111,19 +111,14 @@ class ProjectStatistics < ApplicationRecord
super.to_i
end
- def update_storage_size
- self.storage_size = storage_size_components.sum { |component| method(component).call }
- end
-
+ # Since this incremental update method does not update the storage_size directly,
+ # we have to update the storage_size separately in an after_commit action.
def refresh_storage_size!
detect_race_on_record(log_fields: { caller: __method__, attributes: :storage_size }) do
- update!(storage_size: storage_size_sum)
+ self.class.where(id: id).update_all("storage_size = #{storage_size_sum}")
end
end
- # Since this incremental update method does not call update_storage_size above through before_save,
- # we have to update the storage_size separately.
- #
# For counter attributes, storage_size will be refreshed after the counter is flushed,
# through counter_attribute_after_commit
#
@@ -175,6 +170,10 @@ class ProjectStatistics < ApplicationRecord
Namespaces::ScheduleAggregationWorker.perform_async(project.namespace_id)
end
end
+
+ def storage_size_components_changed?
+ (previous_changes.keys & STORAGE_SIZE_COMPONENTS.map(&:to_s)).any?
+ end
end
ProjectStatistics.prepend_mod_with('ProjectStatistics')
diff --git a/db/post_migrate/20230809133249_index_sbom_occurrences_on_project_id_component_id_and_input_file_path.rb b/db/post_migrate/20230809133249_index_sbom_occurrences_on_project_id_component_id_and_input_file_path.rb
new file mode 100644
index 00000000000..5b048344fad
--- /dev/null
+++ b/db/post_migrate/20230809133249_index_sbom_occurrences_on_project_id_component_id_and_input_file_path.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+class IndexSbomOccurrencesOnProjectIdComponentIdAndInputFilePath < Gitlab::Database::Migration[2.1]
+ INDEX_NAME = 'index_sbom_occurrences_for_input_file_path_search'
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :sbom_occurrences, %i[project_id component_id input_file_path], name: INDEX_NAME
+ end
+
+ def down
+ remove_concurrent_index_by_name :sbom_occurrences, INDEX_NAME
+ end
+end
diff --git a/db/schema_migrations/20230809133249 b/db/schema_migrations/20230809133249
new file mode 100644
index 00000000000..0b4fb9cd793
--- /dev/null
+++ b/db/schema_migrations/20230809133249
@@ -0,0 +1 @@
+0e710bdbd00626f66ede91b8782f8049743b5bf1e91b90e32eb733bcd159383c \ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index a51ca442b86..fee37d3d523 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -33285,6 +33285,8 @@ CREATE UNIQUE INDEX index_sbom_component_versions_on_component_id_and_version ON
CREATE UNIQUE INDEX index_sbom_components_on_component_type_name_and_purl_type ON sbom_components USING btree (name, purl_type, component_type);
+CREATE INDEX index_sbom_occurrences_for_input_file_path_search ON sbom_occurrences USING btree (project_id, component_id, input_file_path);
+
CREATE INDEX index_sbom_occurrences_on_component_id ON sbom_occurrences USING btree (component_id);
CREATE INDEX index_sbom_occurrences_on_component_version_id ON sbom_occurrences USING btree (component_version_id);
diff --git a/doc/administration/dedicated/index.md b/doc/administration/dedicated/index.md
index e9211f7283c..1e27f76ad59 100644
--- a/doc/administration/dedicated/index.md
+++ b/doc/administration/dedicated/index.md
@@ -39,7 +39,7 @@ When onboarding, you must also specify your preference for the weekly four-hour
Available scheduled maintenance windows, performed outside standard working hours:
-- APAC: Wednesday 1 AM - 5 PM UTC
+- APAC: Wednesday 1 AM - 5 AM UTC
- EU: Tuesday 1 AM - 5 AM UTC
- AMER Option 1: Tuesday 7 AM - 11 AM UTC
- AMER Option 2: Sunday 9 PM - Monday 1 AM UTC
diff --git a/doc/administration/file_hooks.md b/doc/administration/file_hooks.md
index 904da47caff..2748984b51d 100644
--- a/doc/administration/file_hooks.md
+++ b/doc/administration/file_hooks.md
@@ -25,14 +25,13 @@ Instead of writing and supporting your own file hook, you can also make changes
directly to the GitLab source code and contribute back upstream. In this way, we can
ensure functionality is preserved across versions and covered by tests.
-## Setup
+## Set up a custom file hook
-The file hooks must be placed directly into the `file_hooks` directory, subdirectories
-are ignored. There is an
-[`example` directory inside `file_hooks`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/file_hooks/examples)
-where you can find some basic examples.
+File hooks must be in the `file_hooks` directory. Subdirectories are ignored.
+Find examples in the
+[`example` directory under `file_hooks`](https://gitlab.com/gitlab-org/gitlab/-/tree/master/file_hooks/examples).
-Follow the steps below to set up a custom hook:
+To set up a custom hook:
1. On the GitLab server, locate the plugin directory. For self-compiled installations, the path is usually
`/home/git/gitlab/file_hooks/`. For Linux package installations, the path is usually
@@ -51,8 +50,8 @@ Follow the steps below to set up a custom hook:
1. The data to the file hook is provided as JSON on `STDIN`. It is exactly the
same as for [system hooks](system_hooks.md).
-That's it! Assuming the file hook code is properly implemented, the hook fires
-as appropriate. The file hooks file list is updated for each event, there is no
+Assuming the file hook code is properly implemented, the hook fires
+as appropriate. The file hooks file list is updated for each event. There is no
need to restart GitLab to apply a new file hook.
If a file hook executes with non-zero exit code or GitLab fails to execute it, a
@@ -61,7 +60,7 @@ message is logged to:
- `gitlab-rails/file_hook.log` in a Linux package installation.
- `log/file_hook.log` in a self-compiled installation.
-## Creating file hooks
+## File hook example
This example responds only on the event `project_create`, and
the GitLab instance informs the administrators that a new project has been created.
@@ -88,7 +87,7 @@ Mail.deliver do
end
```
-## Validation
+## Validation example
Writing your own file hook can be tricky and it's easier if you can check it
without altering the system. A Rake task is provided so that you can use it
diff --git a/doc/administration/settings/account_and_limit_settings.md b/doc/administration/settings/account_and_limit_settings.md
index de0b43f9dda..2a98a1264e4 100644
--- a/doc/administration/settings/account_and_limit_settings.md
+++ b/doc/administration/settings/account_and_limit_settings.md
@@ -139,7 +139,7 @@ To modify the maximum download file size for imports by direct transfer:
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/128218) in GitLab 16.3.
-When you [import a project](../../user/project/settings/import_export.md), you can specify the maximum decompressed file size for imported archives. The default value is 25 GB.
+When you import a project using [file exports](../../user/project/settings/import_export.md) or [direct transfer](../../user/group/import/index.md#migrate-groups-by-direct-transfer-recommended), you can specify the maximum decompressed file size for imported archives. The default value is 25 GB.
When you import a compressed file, the decompressed size cannot exceed the maximum decompressed file size limit. If the decompressed size exceeds the configured limit, the following error is returned:
diff --git a/doc/update/index.md b/doc/update/index.md
index a39b3a14562..fc8f775c116 100644
--- a/doc/update/index.md
+++ b/doc/update/index.md
@@ -189,8 +189,8 @@ When upgrading:
1. Find where your version sits in the upgrade path:
- GitLab 14: [`14.0.12`](#1400) > [`14.3.6`](#1430) > [`14.9.5`](#1490) > [`14.10.5`](#14100).
- - GitLab 15: [`15.0.5`](#1500) > [`15.1.6`](#1510) (for GitLab instances with multiple web nodes) > [`15.4.6`](#1540) > [`15.11.x`](#15110).
- - GitLab 16: [latest `16.Y.Z`](https://gitlab.com/gitlab-org/gitlab/-/releases).
+ - GitLab 15: [`15.0.5`](#1500) > [`15.1.6`](#1510) (for GitLab instances with multiple web nodes) > [`15.4.6`](#1540) > [`15.11.13`](#15110).
+ - GitLab 16: [`16.0.x`](versions/gitlab_16_changes.md#1600) (only [instances with lots of users](versions/gitlab_16_changes.md#long-running-user-type-data-change)) > [latest `16.Y.Z`](https://gitlab.com/gitlab-org/gitlab/-/releases).
1. Check for [required upgrade stops](#required-upgrade-stops).
1. Consult the [version-specific upgrade instructions](#version-specific-upgrading-instructions).
@@ -837,6 +837,14 @@ A [license caching issue](https://gitlab.com/gitlab-org/gitlab/-/issues/376706)
1. Ensure all GitLab web nodes are running GitLab 15.1.Z.
1. [Enable the `active_support_hash_digest_sha256` feature flag](../administration/feature_flags.md#how-to-enable-and-disable-features-behind-flags) to switch `ActiveSupport::Digest` to use SHA256:
+
+ 1. [Start the rails console](../administration/operations/rails_console.md)
+ 1. Enable the feature flag:
+
+ ```ruby
+ Feature.enable(:active_support_hash_digest_sha256)
+ ```
+
1. Only then, continue to upgrade to later versions of GitLab.
- Unauthenticated requests to the [`ciConfig` GraphQL field](../api/graphql/reference/index.md#queryciconfig) are no longer supported.
Before you upgrade to GitLab 15.1, add an [access token](../api/rest/index.md#authentication) to your requests.
diff --git a/doc/update/versions/gitlab_16_changes.md b/doc/update/versions/gitlab_16_changes.md
index 39307c71312..09b3d79069b 100644
--- a/doc/update/versions/gitlab_16_changes.md
+++ b/doc/update/versions/gitlab_16_changes.md
@@ -24,6 +24,9 @@ For more information about upgrading GitLab Helm Chart, see [the release notes f
You should check the size of your RSA keys (`openssl rsa -in <your-key-file> -text -noout | grep "Key:"`)
for any of the applications above before
upgrading.
+- Large instances with 30,000 users or more must include 16.0 on the upgrade path and wait for
+ database migrations to complete before upgrading to a later release. [Read more](#long-running-user-type-data-change)
+ about the reason for this, and how to assess whether your GitLab instance is affected.
### Linux package installations
@@ -62,6 +65,17 @@ Specific information applies to Linux package installations:
- Impacted versions: GitLab versions 16.1.0 - 16.1.3 and 16.2.0 - 16.2.2.
- If you deployed an affected version, after upgrading to a fixed GitLab version, follow [these instructions](https://gitlab.com/gitlab-org/gitlab/-/issues/419742#to-fix-data)
to resync the affected job artifacts.
+- You might encounter the following error while upgrading to GitLab 16.2 or later:
+
+ ```plaintext
+ main: == 20230620134708 ValidateUserTypeConstraint: migrating =======================
+ main: -- execute("ALTER TABLE users VALIDATE CONSTRAINT check_0dd5948e38;")
+ rake aborted!
+ StandardError: An error has occurred, all later migrations canceled:
+ PG::CheckViolation: ERROR: check constraint "check_0dd5948e38" of relation "users" is violated by some row
+ ```
+
+ For more information, see [issue 421629](https://gitlab.com/gitlab-org/gitlab/-/issues/421629).
### Linux package installations
@@ -80,12 +94,9 @@ Specific information applies to Linux package installations:
## 16.1.0
-- A `MigrateHumanUserType` background migration will be finalized with
- the `FinalizeUserTypeMigration` migration.
- GitLab 16.0 introduced a [batched background migration](../background_migrations.md#batched-background-migrations) to
- [migrate `user_type` values from `NULL` to `0`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/115849). This
- migration may take multiple days to complete on larger GitLab instances. Make sure the migration
- has completed successfully before upgrading to 16.1.0.
+- Large instances with 30,000 users or more must include 16.0 on the upgrade path and wait for
+ database migrations to complete before upgrading to a later release. [Read more](#long-running-user-type-data-change)
+ about the reason for this, and how to assess whether your GitLab instance is affected.
- A `BackfillPreparedAtMergeRequests` background migration will be finalized with
the `FinalizeBackFillPreparedAtMergeRequests` post-deploy migration.
GitLab 15.10.0 introduced a [batched background migration](../background_migrations.md#batched-background-migrations) to
@@ -124,6 +135,9 @@ Specific information applies to installations using Geo:
## 16.0.0
+- Large instances with 30,000 users or more must include 16.0 on the upgrade path and wait for
+ database migrations to complete before upgrading to a later release. [Read more](#long-running-user-type-data-change)
+ about the reason for this, and how to assess whether your GitLab instance is affected.
- Sidekiq crashes if there are non-ASCII characters in the `/etc/gitlab/gitlab.rb` file. You can fix this
by following the workaround in [issue 412767](https://gitlab.com/gitlab-org/gitlab/-/issues/412767#note_1404507549).
- Sidekiq jobs are only routed to `default` and `mailers` queues by default, and as a result,
@@ -166,3 +180,42 @@ Specific information applies to installations using Geo:
- Impacted versions: GitLab versions 15.11.x, 16.0.x, and 16.1.0 - 16.1.2.
- Versions containing fix: GitLab 16.1.3 and later.
+
+## Long-running user type data change
+
+GitLab 16.0 is a required stop for large GitLab instances with a lot of records in the `users` table.
+
+The threshold is **30,000 users**, which includes:
+
+- Developers and other users in any state, including active, blocked, and pending approval.
+- Bot accounts for project and group access tokens.
+
+GitLab 16.0 introduced a [batched background migration](../background_migrations.md#batched-background-migrations) to
+[migrate `user_type` values from `NULL` to `0`](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/115849). This
+migration might take multiple days to complete on larger GitLab instances. Make sure the migration
+has completed successfully before upgrading to 16.1.0 or later.
+
+GitLab 16.1 introduces the `FinalizeUserTypeMigration` migration which ensures the
+16.0 `MigrateHumanUserType` background migration is completed, making the 16.0 changes synchronously
+during the upgrade if it's not completed.
+
+GitLab 16.2 [implements a `NOT NULL` database constraint](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/122454)
+which fails if the 16.0 migration is not complete.
+
+If 16.0 has been skipped (or the 16.0 migration is not complete), subsequent
+Linux package (Omnibus) and Docker upgrades might fail
+after an hour:
+
+```plaintext
+FATAL: Mixlib::ShellOut::CommandTimeout: rails_migration[gitlab-rails]
+[..]
+Mixlib::ShellOut::CommandTimeout: Command timed out after 3600s:
+```
+
+[There is a fix-forward workaround for this issue](../package/index.md#mixlibshelloutcommandtimeout-rails_migrationgitlab-rails--command-timed-out-after-3600s).
+
+While the workaround is completing the database changes, GitLab is likely to be in
+an unusable state, generating `500` errors. The errors are caused by Sidekiq and Puma running
+application code that is incompatible with the database schema.
+
+At the end of the workaround process, Sidekiq and Puma are restarted to resolve that issue.
diff --git a/doc/user/application_security/policies/scan-result-policies.md b/doc/user/application_security/policies/scan-result-policies.md
index 211ea811d29..fabc0cb41f8 100644
--- a/doc/user/application_security/policies/scan-result-policies.md
+++ b/doc/user/application_security/policies/scan-result-policies.md
@@ -79,6 +79,7 @@ the following sections and tables provide an alternative.
> - The scan result policy field `vulnerability_attributes` was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/123052) in GitLab 16.2 [with a flag](../../../administration/feature_flags.md) named `enforce_vulnerability_attributes_rules`. Disabled by default.
> - [Enabled on GitLab.com and self-managed](https://gitlab.com/gitlab-org/gitlab/-/issues/418784) in GitLab 16.3.
+> - The scan result policy field `vulnerability_age` was [introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/123956) in GitLab 16.2.
FLAG:
On self-managed GitLab, by default the `vulnerability_attributes` field is available. To hide the feature, an administrator can [disable the feature flag](../../../administration/feature_flags.md) named `enforce_vulnerability_attributes_rules`.
@@ -95,6 +96,7 @@ This rule enforces the defined actions based on security scan findings.
| `severity_levels` | `array` of `string` | true | `info`, `unknown`, `low`, `medium`, `high`, `critical` | The severity levels for this rule to consider. |
| `vulnerability_states` | `array` of `string` | true | `newly_detected`, `detected`, `confirmed`, `resolved`, `dismissed`, `new_needs_triage`, `new_dismissed` | All vulnerabilities fall into two categories:<br><br>**Newly Detected Vulnerabilities** - the `newly_detected` policy option covers vulnerabilities identified in the merge request branch itself but that do not currently exist on the default branch. This policy option requires a pipeline to complete before the rule is evaluated so that it knows whether vulnerabilities are newly detected or not. Merge requests are blocked until the pipeline and necessary security scans are complete. The `newly_detected` option considers both of the following statuses:<br><br> • Detected<br> • Dismissed<br><br> The `new_needs_triage` option considers the status<br><br> • Detected<br><br> The `new_dismissed` option considers the status<br><br> • Dismissed<br><br>**Pre-Existing Vulnerabilities** - these policy options are evaluated immediately and do not require a pipeline complete as they consider only vulnerabilities previously detected in the default branch.<br><br> • `Detected` - the policy looks for vulnerabilities in the detected state.<br> • `Confirmed` - the policy looks for vulnerabilities in the confirmed state.<br> • `Dismissed` - the policy looks for vulnerabilities in the dismissed state.<br> • `Resolved` - the policy looks for vulnerabilities in the resolved state. |
| `vulnerability_attributes` | `object` | false | `{false_positive: boolean, fix_available: boolean}` | All vulnerability findings are considered by default. But filters can be applied for attributes to consider only vulnerability findings: <br><br> • With a fix available (`fix_available: true`)<br><br> • With no fix available (`fix_available: false`)<br> • That are false positive (`false_positive: true`)<br> • That are not false positive (`false_positive: false`)<br> • Or a combination of both. For example (`fix_available: true, false_positive: false`) |
+| `vulnerability_age` | `object` | false | N/A | Filter pre-existing vulnerability findings by age. A vulnerability's age is calculated as the time since it was detected in the project. The criteria are `operator`, `value`, and `interval`.<br>- The `operator` criterion specifies if the age comparison used is older than (`greater_than`) or younger than (`less_than`).<br>- The `value` criterion specifies the numeric value representing the vulnerability's age.<br>- The `interval` criterion specifies the unit of measure of the vulnerability's age: `day`, `week`, `month`, or `year`.<br><br>Example: `operator: greater_than`, `value: 30`, `interval: day`. |
## `license_finding` rule type
@@ -179,12 +181,14 @@ scan_result_policy:
- low
- unknown
vulnerability_states:
- - newly_detected
+ - detected
+ vulnerability_age:
+ operator: greater_than
+ value: 30
+ interval: day
actions:
- type: require_approval
approvals_required: 1
- user_approvers:
- - sam.white
role_approvers:
- owner
```
@@ -193,8 +197,8 @@ In this example:
- Every MR that contains new `critical` vulnerabilities identified by container scanning requires
one approval from `alberto.dare`.
-- Every MR that contains more than one new `low` or `unknown` vulnerability identified by container
- scanning requires one approval from `sam.white`.
+- Every MR that contains more than one preexisting `low` or `unknown` vulnerability older than 30 days identified by
+ container scanning requires one approval from a project member with the Owner role.
## Example for Scan Result Policy editor
diff --git a/spec/models/namespace/root_storage_statistics_spec.rb b/spec/models/namespace/root_storage_statistics_spec.rb
index f2c661c1cfb..4b66b7532a7 100644
--- a/spec/models/namespace/root_storage_statistics_spec.rb
+++ b/spec/models/namespace/root_storage_statistics_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
total_lfs_objects_size = project_stat1.lfs_objects_size + project_stat2.lfs_objects_size
total_build_artifacts_size = project_stat1.build_artifacts_size + project_stat2.build_artifacts_size
total_packages_size = project_stat1.packages_size + project_stat2.packages_size
- total_storage_size = project_stat1.storage_size + project_stat2.storage_size
+ total_storage_size = project_stat1.reload.storage_size + project_stat2.reload.storage_size
total_snippets_size = project_stat1.snippets_size + project_stat2.snippets_size
total_pipeline_artifacts_size = project_stat1.pipeline_artifacts_size + project_stat2.pipeline_artifacts_size
total_uploads_size = project_stat1.uploads_size + project_stat2.uploads_size
@@ -64,7 +64,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.reload
- total_storage_size = project_stat1.storage_size + project_stat2.storage_size + 999
+ total_storage_size = project_stat1.reload.storage_size + project_stat2.reload.storage_size + 999
expect(root_storage_statistics.container_registry_size).to eq(999)
expect(root_storage_statistics.storage_size).to eq(total_storage_size)
@@ -162,7 +162,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
total_dependency_proxy_size = root_namespace_stat.dependency_proxy_size +
group1_namespace_stat.dependency_proxy_size + group2_namespace_stat.dependency_proxy_size +
subgroup1_namespace_stat.dependency_proxy_size
- total_storage_size = project_stat1.storage_size + project_stat2.storage_size +
+ total_storage_size = project_stat1.reload.storage_size + project_stat2.reload.storage_size +
root_namespace_stat.storage_size + group1_namespace_stat.storage_size +
group2_namespace_stat.storage_size + subgroup1_namespace_stat.storage_size
@@ -183,7 +183,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- total_storage_size = project_stat1.storage_size + project_stat2.storage_size
+ total_storage_size = project_stat1.reload.storage_size + project_stat2.reload.storage_size
expect(root_storage_statistics.storage_size).to eq(total_storage_size)
end
@@ -204,7 +204,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.storage_size).to eq(project_stat1.storage_size + project_stat2.storage_size)
+ expect(root_storage_statistics.storage_size)
+ .to eq(project_stat1.reload.storage_size + project_stat2.reload.storage_size)
expect(root_storage_statistics.dependency_proxy_size).to eq(0)
end
@@ -249,7 +250,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.reload.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.reload.private_forks_storage_size)
+ .to eq(project_fork.statistics.reload.storage_size)
end
it 'aggregates total public forks size' do
@@ -258,7 +260,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.reload.public_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.reload.public_forks_storage_size)
+ .to eq(project_fork.statistics.reload.storage_size)
end
it 'aggregates total internal forks size' do
@@ -267,7 +270,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.reload.internal_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.reload.internal_forks_storage_size)
+ .to eq(project_fork.statistics.reload.storage_size)
end
it 'aggregates multiple forks' do
@@ -277,7 +281,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- total_size = fork_a.statistics.storage_size + fork_b.statistics.storage_size
+ total_size = fork_a.statistics.reload.storage_size + fork_b.statistics.reload.storage_size
expect(root_storage_statistics.reload.private_forks_storage_size).to eq(total_size)
end
@@ -289,7 +293,7 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.reload.private_forks_storage_size).to eq(fork_a.statistics.storage_size)
+ expect(root_storage_statistics.reload.private_forks_storage_size).to eq(fork_a.statistics.reload.storage_size)
end
it 'aggregates forks in subgroups' do
@@ -299,7 +303,8 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
- expect(root_storage_statistics.reload.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.reload.private_forks_storage_size)
+ .to eq(project_fork.statistics.reload.storage_size)
end
it 'aggregates forks along with total storage size' do
@@ -309,9 +314,9 @@ RSpec.describe Namespace::RootStorageStatistics, type: :model, feature_category:
root_storage_statistics.recalculate!
root_storage_statistics.reload
- expect(root_storage_statistics.private_forks_storage_size).to eq(project_fork.statistics.storage_size)
+ expect(root_storage_statistics.private_forks_storage_size).to eq(project_fork.statistics.reload.storage_size)
- total = project.statistics.storage_size + project_fork.statistics.storage_size
+ total = project.statistics.storage_size + project_fork.statistics.reload.storage_size
expect(root_storage_statistics.storage_size).to eq(total)
end
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index 090173bc999..6ba2db25afe 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -25,6 +25,55 @@ RSpec.describe ProjectStatistics do
end
end
+ describe 'callbacks' do
+ context 'on after_commit' do
+ context 'when storage size components are updated' do
+ it 'updates the correct storage size for relevant attributes' do
+ statistics.update!(repository_size: 10)
+
+ expect(statistics.reload.storage_size).to eq(10)
+ end
+ end
+
+ context 'when storage size components are not updated' do
+ it 'does not affect the storage_size total' do
+ statistics.update!(pipeline_artifacts_size: 3, container_registry_size: 50)
+
+ expect(statistics.reload.storage_size).to eq(0)
+ end
+ end
+ end
+
+ describe 'with race conditions' do
+ before do
+ statistics.update!(storage_size: 14621247)
+ end
+
+ it 'handles concurrent updates correctly' do
+ # Concurrently update the statistics in two different processes
+ t1 = Thread.new do
+ stats_1 = ProjectStatistics.find(statistics.id)
+ stats_1.snippets_size = 530
+ stats_1.save!
+ end
+
+ t2 = Thread.new do
+ stats_2 = ProjectStatistics.find(statistics.id)
+ ProjectStatistics.update_counters(stats_2.id, packages_size: 1000)
+ stats_2.refresh_storage_size!
+ end
+
+ [t1, t2].each(&:join)
+
+ # Reload the statistics object
+ statistics.reload
+
+ # The final storage size should be correctly updated
+ expect(statistics.storage_size).to eq(1530) # Final value is correct (snippets_size + packages_size)
+ end
+ end
+ end
+
describe 'statistics columns' do
it "supports bigint values" do
expect do
@@ -370,75 +419,6 @@ RSpec.describe ProjectStatistics do
end
end
- describe '#update_storage_size' do
- it "sums the relevant storage counters" do
- statistics.update!(
- repository_size: 2,
- wiki_size: 4,
- lfs_objects_size: 3,
- snippets_size: 2,
- build_artifacts_size: 3,
- packages_size: 6,
- uploads_size: 5
- )
-
- statistics.reload
-
- expect(statistics.storage_size).to eq 25
- end
-
- it 'excludes the container_registry_size' do
- statistics.update!(
- repository_size: 2,
- uploads_size: 5,
- container_registry_size: 10
- )
-
- statistics.reload
-
- expect(statistics.storage_size).to eq 7
- end
-
- it 'excludes the pipeline_artifacts_size' do
- statistics.update!(
- repository_size: 2,
- uploads_size: 5,
- pipeline_artifacts_size: 10
- )
-
- statistics.reload
-
- expect(statistics.storage_size).to eq 7
- end
-
- it 'works during wiki_size backfill' do
- statistics.update!(
- repository_size: 2,
- wiki_size: nil,
- lfs_objects_size: 3
- )
-
- statistics.reload
-
- expect(statistics.storage_size).to eq 5
- end
-
- context 'when nullable columns are nil' do
- it 'does not raise any error' do
- expect do
- statistics.update!(
- repository_size: 2,
- wiki_size: nil,
- lfs_objects_size: 3,
- snippets_size: nil
- )
- end.not_to raise_error
-
- expect(statistics.storage_size).to eq 5
- end
- end
- end
-
describe '#refresh_storage_size!' do
subject(:refresh_storage_size) { statistics.refresh_storage_size! }
@@ -464,6 +444,7 @@ RSpec.describe ProjectStatistics do
statistics.update_columns(
repository_size: 2,
wiki_size: nil,
+ snippets_size: nil,
storage_size: 0
)
end