gitlab.com/gitlab-org/gitlab-foss.git

 GITALY_SERVER_VERSION | 2
 app/assets/javascripts/projects/new/components/new_project_url_select.vue | 19
 app/assets/javascripts/projects/project_new.js | 1
 app/assets/javascripts/runner/components/runner_filtered_search_bar.vue | 12
 app/models/concerns/vulnerability_finding_helpers.rb | 37
 app/models/issue.rb | 1
 app/models/namespace.rb | 1
 app/models/work_item.rb | 1
 app/services/issues/create_service.rb | 2
 db/migrate/20220628110214_add_namespace_id_column_to_issues_table.rb | 13
 db/post_migrate/20220622070547_add_temp_index_for_container_registry_size_migration.rb | 22
 db/post_migrate/20220622080547_backfill_project_statistics_with_container_registry_size.rb | 33
 db/post_migrate/20220628110823_add_issues_namespace_id_fk_and_index.rb | 22
 db/schema_migrations/20220622070547 | 1
 db/schema_migrations/20220622080547 | 1
 db/schema_migrations/20220628110214 | 1
 db/schema_migrations/20220628110823 | 1
 db/structure.sql | 10
 doc/api/graphql/index.md | 8
 doc/user/group/saml_sso/scim_setup.md | 4
 lib/gitlab/background_migration/backfill_project_statistics_container_repository_size.rb | 14
 lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy.rb | 21
 qa/qa/git/repository.rb | 2
 spec/frontend/runner/components/runner_filtered_search_bar_spec.js | 10
 spec/frontend/runner/mock_data.js | 202
 spec/frontend/runner/runner_search_utils_spec.js | 210
 spec/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy_spec.rb | 138
 spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb | 41
 spec/models/issue_spec.rb | 1
 spec/models/namespace_spec.rb | 1
 spec/models/work_item_spec.rb | 1
 spec/workers/namespaces/onboarding_issue_created_worker_spec.rb | 2
 workhorse/internal/api/api.go | 1
 workhorse/internal/api/api_test.go | 45
 workhorse/internal/upload/multipart_uploader.go | 22
 workhorse/internal/upload/rewrite.go | 20
 workhorse/internal/upstream/routes.go | 4
 workhorse/upload_test.go | 48
 38 files changed, 676 insertions(+), 299 deletions(-)
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index 8f7b4f82711..e5ae88642b2 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-9deccce765e2437e87563378f878b8604fc73a9a
+9ff756d21305e63a256ba74c6f75b5c867d9fc22
diff --git a/app/assets/javascripts/projects/new/components/new_project_url_select.vue b/app/assets/javascripts/projects/new/components/new_project_url_select.vue
index e6a02a0d67d..eccfb3d844c 100644
--- a/app/assets/javascripts/projects/new/components/new_project_url_select.vue
+++ b/app/assets/javascripts/projects/new/components/new_project_url_select.vue
@@ -57,10 +57,7 @@ export default {
id: this.namespaceId,
fullPath: this.namespaceFullPath,
}
- : {
- id: undefined,
- fullPath: s__('ProjectsNew|Pick a group or namespace'),
- },
+ : this.$options.emptyNameSpace,
shouldSkipQuery: true,
userNamespaceId: this.userNamespaceId,
};
@@ -120,12 +117,18 @@ export default {
this.setNamespace({ id, fullPath });
},
setNamespace({ id, fullPath }) {
- this.selectedNamespace = {
- id: getIdFromGraphQLId(id),
- fullPath,
- };
+ this.selectedNamespace = id
+ ? {
+ id: getIdFromGraphQLId(id),
+ fullPath,
+ }
+ : this.$options.emptyNameSpace;
},
},
+ emptyNameSpace: {
+ id: undefined,
+ fullPath: s__('ProjectsNew|Pick a group or namespace'),
+ },
};
</script>
diff --git a/app/assets/javascripts/projects/project_new.js b/app/assets/javascripts/projects/project_new.js
index 186946a83ad..fe84660422b 100644
--- a/app/assets/javascripts/projects/project_new.js
+++ b/app/assets/javascripts/projects/project_new.js
@@ -342,6 +342,7 @@ const bindEvents = () => {
export default {
bindEvents,
+ validateGroupNamespaceDropdown,
deriveProjectPathFromUrl,
onProjectNameChange,
onProjectPathChange,
diff --git a/app/assets/javascripts/runner/components/runner_filtered_search_bar.vue b/app/assets/javascripts/runner/components/runner_filtered_search_bar.vue
index f0f8bbdf5df..bff5ec9b238 100644
--- a/app/assets/javascripts/runner/components/runner_filtered_search_bar.vue
+++ b/app/assets/javascripts/runner/components/runner_filtered_search_bar.vue
@@ -45,7 +45,7 @@ export default {
},
},
data() {
- // filtered_search_bar_root.vue may mutate the inital
+ // filtered_search_bar_root.vue may mutate the initial
// filters. Use `cloneDeep` to prevent those mutations
// from affecting this component
const { filters, sort } = cloneDeep(this.value);
@@ -54,6 +54,14 @@ export default {
initialSortBy: sort,
};
},
+ computed: {
+ validTokens() {
+ // Some filters are only available in EE
+ // EE-only tokens are represented by `null` or `undefined`
+ // values when in CE
+ return this.tokens.filter(Boolean);
+ },
+ },
methods: {
onFilter(filters) {
// Apply new filters, from page 1
@@ -83,7 +91,7 @@ export default {
recent-searches-storage-key="runners-search"
:sort-options="$options.sortOptions"
:initial-filter-value="initialFilterValue"
- :tokens="tokens"
+ :tokens="validTokens"
:initial-sort-by="initialSortBy"
:search-input-placeholder="__('Search or filter results...')"
data-testid="runners-filtered-search"
diff --git a/app/models/concerns/vulnerability_finding_helpers.rb b/app/models/concerns/vulnerability_finding_helpers.rb
index 7f96b3901f1..4cf36f83857 100644
--- a/app/models/concerns/vulnerability_finding_helpers.rb
+++ b/app/models/concerns/vulnerability_finding_helpers.rb
@@ -42,4 +42,41 @@ module VulnerabilityFindingHelpers
)
end
end
+
+ def build_vulnerability_finding(security_finding)
+ report_finding = report_finding_for(security_finding)
+ return Vulnerabilities::Finding.new unless report_finding
+
+ finding_data = report_finding.to_hash.except(:compare_key, :identifiers, :location, :scanner, :links, :signatures,
+ :flags, :evidence)
+ identifiers = report_finding.identifiers.map do |identifier|
+ Vulnerabilities::Identifier.new(identifier.to_hash)
+ end
+ signatures = report_finding.signatures.map do |signature|
+ Vulnerabilities::FindingSignature.new(signature.to_hash)
+ end
+ evidence = Vulnerabilities::Finding::Evidence.new(data: report_finding.evidence.data) if report_finding.evidence
+
+ Vulnerabilities::Finding.new(finding_data).tap do |finding|
+ finding.location_fingerprint = report_finding.location.fingerprint
+ finding.vulnerability = vulnerability_for(security_finding.uuid)
+ finding.project = project
+ finding.sha = pipeline.sha
+ finding.scanner = security_finding.scanner
+ finding.finding_evidence = evidence
+
+ if calculate_false_positive?
+ finding.vulnerability_flags = report_finding.flags.map do |flag|
+ Vulnerabilities::Flag.new(flag)
+ end
+ end
+
+ finding.identifiers = identifiers
+ finding.signatures = signatures
+ end
+ end
+
+ def calculate_false_positive?
+ project.licensed_feature_available?(:sast_fp_reduction)
+ end
end
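
The helper above assembles an unsaved Vulnerabilities::Finding graph from a security finding's report data. A minimal usage sketch, assuming the including class supplies report_finding_for, vulnerability_for, project, and pipeline as the mixin expects (illustrative only):

    finding = build_vulnerability_finding(security_finding)
    finding.persisted?           # => false; nothing is written to the database
    finding.identifiers          # built from the report finding's identifiers
    finding.finding_evidence     # present only when the report carried evidence
    finding.vulnerability_flags  # populated only when :sast_fp_reduction is licensed
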
diff --git a/app/models/issue.rb b/app/models/issue.rb
index daad3c7c691..da1a9428106 100644
--- a/app/models/issue.rb
+++ b/app/models/issue.rb
@@ -46,7 +46,6 @@ class Issue < ApplicationRecord
TYPES_FOR_LIST = %w(issue incident).freeze
belongs_to :project
- has_one :namespace, through: :project
belongs_to :duplicated_to, class_name: 'Issue'
belongs_to :closed_by, class_name: 'User'
diff --git a/app/models/namespace.rb b/app/models/namespace.rb
index 4a24624528a..d6a24e7964e 100644
--- a/app/models/namespace.rb
+++ b/app/models/namespace.rb
@@ -74,6 +74,7 @@ class Namespace < ApplicationRecord
has_many :sync_events, class_name: 'Namespaces::SyncEvent'
has_one :cluster_enabled_grant, inverse_of: :namespace, class_name: 'Clusters::ClusterEnabledGrant'
+ has_many :work_items, inverse_of: :namespace, class_name: 'WorkItem'
validates :owner, presence: true, if: ->(n) { n.owner_required? }
validates :name,
diff --git a/app/models/work_item.rb b/app/models/work_item.rb
index bdd9aae90a4..642dd0736f5 100644
--- a/app/models/work_item.rb
+++ b/app/models/work_item.rb
@@ -4,6 +4,7 @@ class WorkItem < Issue
self.table_name = 'issues'
self.inheritance_column = :_type_disabled
+ belongs_to :namespace, class_name: 'Namespace', foreign_key: :namespace_id, inverse_of: :work_items
has_one :parent_link, class_name: '::WorkItems::ParentLink', foreign_key: :work_item_id
has_one :work_item_parent, through: :parent_link, class_name: 'WorkItem'
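
Taken together with the Issue and Namespace changes above, the namespace association now lives on WorkItem instead of Issue. A short sketch of the resulting access patterns, assuming standard ActiveRecord behavior (illustrative only):

    issue.project.namespace_id   # issues reach their namespace via the project
    work_item.namespace          # work items read issues.namespace_id directly
    namespace.work_items         # inverse side added on Namespace above
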
diff --git a/app/services/issues/create_service.rb b/app/services/issues/create_service.rb
index 91fbe75640f..30d4cb68840 100644
--- a/app/services/issues/create_service.rb
+++ b/app/services/issues/create_service.rb
@@ -47,7 +47,7 @@ module Issues
issue.run_after_commit do
NewIssueWorker.perform_async(issue.id, user.id)
Issues::PlacementWorker.perform_async(nil, issue.project_id)
- Namespaces::OnboardingIssueCreatedWorker.perform_async(issue.namespace.id)
+ Namespaces::OnboardingIssueCreatedWorker.perform_async(issue.project.namespace_id)
end
end
diff --git a/db/migrate/20220628110214_add_namespace_id_column_to_issues_table.rb b/db/migrate/20220628110214_add_namespace_id_column_to_issues_table.rb
new file mode 100644
index 00000000000..111ef8babae
--- /dev/null
+++ b/db/migrate/20220628110214_add_namespace_id_column_to_issues_table.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+class AddNamespaceIdColumnToIssuesTable < Gitlab::Database::Migration[2.0]
+ enable_lock_retries!
+
+ def up
+ add_column :issues, :namespace_id, :bigint
+ end
+
+ def down
+ remove_column :issues, :namespace_id
+ end
+end
diff --git a/db/post_migrate/20220622070547_add_temp_index_for_container_registry_size_migration.rb b/db/post_migrate/20220622070547_add_temp_index_for_container_registry_size_migration.rb
new file mode 100644
index 00000000000..64cdd75f5a7
--- /dev/null
+++ b/db/post_migrate/20220622070547_add_temp_index_for_container_registry_size_migration.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class AddTempIndexForContainerRegistrySizeMigration < Gitlab::Database::Migration[2.0]
+ INDEX_CONTAINER_REGISTRY_SIZE = 'tmp_index_migrated_container_registries'
+ INDEX_PROJECT_STATS_CONT_REG_SIZE = 'tmp_index_project_statistics_cont_registry_size'
+
+ disable_ddl_transaction!
+
+ def up
+ # Temporary index used in 20220622080547_backfill_project_statistics_with_container_registry_size
+ # Temporary index to be removed via https://gitlab.com/gitlab-org/gitlab/-/issues/366392
+ add_concurrent_index :container_repositories, [:project_id], name: INDEX_CONTAINER_REGISTRY_SIZE,
+ where: "migration_state = 'import_done' OR created_at >= '2022-01-23'"
+ add_concurrent_index :project_statistics, [:project_id], name: INDEX_PROJECT_STATS_CONT_REG_SIZE,
+ where: "container_registry_size = 0"
+ end
+
+ def down
+ remove_concurrent_index_by_name :container_repositories, INDEX_CONTAINER_REGISTRY_SIZE
+ remove_concurrent_index_by_name :project_statistics, INDEX_PROJECT_STATS_CONT_REG_SIZE
+ end
+end
diff --git a/db/post_migrate/20220622080547_backfill_project_statistics_with_container_registry_size.rb b/db/post_migrate/20220622080547_backfill_project_statistics_with_container_registry_size.rb
new file mode 100644
index 00000000000..2cab7ae25f5
--- /dev/null
+++ b/db/post_migrate/20220622080547_backfill_project_statistics_with_container_registry_size.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+class BackfillProjectStatisticsWithContainerRegistrySize < Gitlab::Database::Migration[2.0]
+ restrict_gitlab_migration gitlab_schema: :gitlab_main
+
+ DELAY_INTERVAL = 2.minutes.to_i
+ BATCH_SIZE = 500
+ MIGRATION_CLASS = 'BackfillProjectStatisticsContainerRepositorySize'
+ BATCH_CLASS_NAME = 'BackfillProjectStatisticsWithContainerRegistrySizeBatchingStrategy'
+ SUB_BATCH_SIZE = 100
+
+ disable_ddl_transaction!
+
+ def up
+ return unless Gitlab.dev_or_test_env? || Gitlab.com?
+
+ queue_batched_background_migration(
+ MIGRATION_CLASS,
+ :container_repositories,
+ :project_id,
+ job_interval: DELAY_INTERVAL,
+ batch_size: BATCH_SIZE,
+ batch_class_name: BATCH_CLASS_NAME,
+ sub_batch_size: SUB_BATCH_SIZE
+ )
+ end
+
+ def down
+ return unless Gitlab.dev_or_test_env? || Gitlab.com?
+
+ delete_batched_background_migration(MIGRATION_CLASS, :container_repositories, :project_id, [])
+ end
+end
diff --git a/db/post_migrate/20220628110823_add_issues_namespace_id_fk_and_index.rb b/db/post_migrate/20220628110823_add_issues_namespace_id_fk_and_index.rb
new file mode 100644
index 00000000000..5a7ca428383
--- /dev/null
+++ b/db/post_migrate/20220628110823_add_issues_namespace_id_fk_and_index.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class AddIssuesNamespaceIdFkAndIndex < Gitlab::Database::Migration[2.0]
+ disable_ddl_transaction!
+ INDEX_NAME = 'index_issues_on_namespace_id'
+
+ def up
+ add_concurrent_index :issues, :namespace_id, name: INDEX_NAME
+ add_concurrent_foreign_key :issues, :namespaces,
+ column: :namespace_id,
+ on_delete: :nullify,
+ reverse_lock_order: true
+ end
+
+ def down
+ with_lock_retries do
+ remove_foreign_key_if_exists :issues, column: :namespace_id
+ end
+
+ remove_concurrent_index_by_name :issues, INDEX_NAME
+ end
+end
diff --git a/db/schema_migrations/20220622070547 b/db/schema_migrations/20220622070547
new file mode 100644
index 00000000000..da1b4231b0d
--- /dev/null
+++ b/db/schema_migrations/20220622070547
@@ -0,0 +1 @@
+e259a91d467b3ec3e09c4514de0e798cffa697a8bc492edd6ad0dcab7f9a9623
\ No newline at end of file
diff --git a/db/schema_migrations/20220622080547 b/db/schema_migrations/20220622080547
new file mode 100644
index 00000000000..c7b3e676a30
--- /dev/null
+++ b/db/schema_migrations/20220622080547
@@ -0,0 +1 @@
+366f0819ce42bc84fc88871872d4b5870e63894fa2e32fbd7808cce2afe4815b
\ No newline at end of file
diff --git a/db/schema_migrations/20220628110214 b/db/schema_migrations/20220628110214
new file mode 100644
index 00000000000..97bab836225
--- /dev/null
+++ b/db/schema_migrations/20220628110214
@@ -0,0 +1 @@
+f95de3ed746d6f661358a3826587da37009f20ba3cd0e8a332e57f9276fb856c
\ No newline at end of file
diff --git a/db/schema_migrations/20220628110823 b/db/schema_migrations/20220628110823
new file mode 100644
index 00000000000..1c5bb0f3320
--- /dev/null
+++ b/db/schema_migrations/20220628110823
@@ -0,0 +1 @@
+50d788ced675b3773bbb84122040c775c24c0993c95542f5130a6456fcd4ee69
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index d6b32b8def0..ba95d458fe0 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -16575,6 +16575,7 @@ CREATE TABLE issues (
blocking_issues_count integer DEFAULT 0 NOT NULL,
upvotes_count integer DEFAULT 0 NOT NULL,
work_item_type_id bigint,
+ namespace_id bigint,
CONSTRAINT check_fba63f706d CHECK ((lock_version IS NOT NULL))
);
@@ -28513,6 +28514,8 @@ CREATE INDEX index_issues_on_milestone_id ON issues USING btree (milestone_id);
CREATE INDEX index_issues_on_moved_to_id ON issues USING btree (moved_to_id) WHERE (moved_to_id IS NOT NULL);
+CREATE INDEX index_issues_on_namespace_id ON issues USING btree (namespace_id);
+
CREATE INDEX index_issues_on_project_id_and_created_at_issue_type_incident ON issues USING btree (project_id, created_at) WHERE (issue_type = 1);
CREATE UNIQUE INDEX index_issues_on_project_id_and_external_key ON issues USING btree (project_id, external_key) WHERE (external_key IS NOT NULL);
@@ -30247,10 +30250,14 @@ CREATE INDEX tmp_index_members_on_state ON members USING btree (state) WHERE (st
CREATE INDEX tmp_index_merge_requests_draft_and_status ON merge_requests USING btree (id) WHERE ((draft = false) AND (state_id = 1) AND ((title)::text ~* '^(\[draft\]|\(draft\)|draft:|draft|\[WIP\]|WIP:|WIP)'::text));
+CREATE INDEX tmp_index_migrated_container_registries ON container_repositories USING btree (project_id) WHERE ((migration_state = 'import_done'::text) OR (created_at >= '2022-01-23 00:00:00'::timestamp without time zone));
+
CREATE UNIQUE INDEX tmp_index_on_tmp_project_id_on_namespaces ON namespaces USING btree (tmp_project_id);
CREATE INDEX tmp_index_on_vulnerabilities_non_dismissed ON vulnerabilities USING btree (id) WHERE (state <> 2);
+CREATE INDEX tmp_index_project_statistics_cont_registry_size ON project_statistics USING btree (project_id) WHERE (container_registry_size = 0);
+
CREATE UNIQUE INDEX uniq_pkgs_deb_grp_architectures_on_distribution_id_and_name ON packages_debian_group_architectures USING btree (distribution_id, name);
CREATE UNIQUE INDEX uniq_pkgs_deb_grp_components_on_distribution_id_and_name ON packages_debian_group_components USING btree (distribution_id, name);
@@ -31933,6 +31940,9 @@ ALTER TABLE ONLY projects
ALTER TABLE ONLY dast_profile_schedules
ADD CONSTRAINT fk_6cca0d8800 FOREIGN KEY (project_id) REFERENCES projects(id) ON DELETE CASCADE;
+ALTER TABLE ONLY issues
+ ADD CONSTRAINT fk_6e10d4d38a FOREIGN KEY (namespace_id) REFERENCES namespaces(id) ON DELETE SET NULL;
+
ALTER TABLE ONLY projects
ADD CONSTRAINT fk_6e5c14658a FOREIGN KEY (pool_repository_id) REFERENCES pool_repositories(id) ON DELETE SET NULL;
diff --git a/doc/api/graphql/index.md b/doc/api/graphql/index.md
index 09b97a78e04..be1bfc79aeb 100644
--- a/doc/api/graphql/index.md
+++ b/doc/api/graphql/index.md
@@ -61,14 +61,6 @@ You can work with sample queries that pull data from public projects on GitLab.c
The [get started](getting_started.md) page includes different methods to customize GraphQL queries.
-### Update the GraphQL API reference
-
-If you change the GraphQL schema, create a merge request to get your changes approved.
-To generate the required documentation and schema, see
-[Rake tasks for developers](../../development/rake_tasks.md#update-graphql-documentation-and-schema-definitions).
-
-Run the commands using the [GitLab Development Kit](https://gitlab.com/gitlab-org/gitlab-development-kit/).
-
## Breaking changes
The GitLab GraphQL API is [versionless](https://graphql.org/learn/best-practices/#versioning) and changes to the API are primarily backward-compatible.
diff --git a/doc/user/group/saml_sso/scim_setup.md b/doc/user/group/saml_sso/scim_setup.md
index cc154b96ed0..970587a7aef 100644
--- a/doc/user/group/saml_sso/scim_setup.md
+++ b/doc/user/group/saml_sso/scim_setup.md
@@ -169,7 +169,7 @@ If [Group SAML](index.md) has been configured and you have an existing GitLab.co
We recommend users do this prior to turning on sync, because while synchronization is active, there may be provisioning errors for existing users.
-New users and existing users on subsequent visits can access the group through the identify provider's dashboard or by visiting links directly.
+New users and existing users on subsequent visits can access the group through the identity provider's dashboard or by visiting links directly.
[In GitLab 14.0 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/325712), GitLab users created by [SAML SSO](index.md#user-access-and-management) or SCIM provisioning display with an **Enterprise** badge in the **Members** view.
@@ -257,7 +257,7 @@ Changing the SAML or SCIM configuration or provider can cause the following prob
| Problem | Solution |
| ------------------------------------------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| SAML and SCIM identity mismatch. | First [verify that the user's SAML NameId matches the SCIM externalId](#how-do-i-verify-users-saml-nameid-matches-the-scim-externalid) and then [update or fix the mismatched SCIM externalId and SAML NameId](#update-or-fix-mismatched-scim-externalid-and-saml-nameid). |
-| SCIM identity mismatch between GitLab and the Identify Provider SCIM app. | You can confirm whether you're hitting the error because of your SCIM identity mismatch between your SCIM app and GitLab.com by using [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user) which shows up in the `id` key and compares it with the user `externalId` in the SCIM app. You can use the same [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user) to update the SCIM `id` for the user on GitLab.com. |
+| SCIM identity mismatch between GitLab and the identity provider SCIM app. | You can confirm whether you're hitting the error because of your SCIM identity mismatch between your SCIM app and GitLab.com by using [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user) which shows up in the `id` key and compares it with the user `externalId` in the SCIM app. You can use the same [SCIM API](../../../api/scim.md#update-a-single-scim-provisioned-user) to update the SCIM `id` for the user on GitLab.com. |
### Azure
diff --git a/lib/gitlab/background_migration/backfill_project_statistics_container_repository_size.rb b/lib/gitlab/background_migration/backfill_project_statistics_container_repository_size.rb
new file mode 100644
index 00000000000..ec813022b8f
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_project_statistics_container_repository_size.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Back-fill container_registry_size for project_statistics
+ class BackfillProjectStatisticsContainerRepositorySize < Gitlab::BackgroundMigration::BatchedMigrationJob
+ def perform
+ # no-op
+ end
+ end
+ end
+end
+
+Gitlab::BackgroundMigration::BackfillProjectStatisticsContainerRepositorySize.prepend_mod_with('Gitlab::BackgroundMigration::BackfillProjectStatisticsContainerRepositorySize') # rubocop:disable Layout/LineLength
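
The CE class is a deliberate no-op; prepend_mod_with lets EE prepend a module of the same name that overrides perform with the real backfill. A hypothetical sketch of the EE side (module names and body assumed, for illustration only):

    module EE
      module Gitlab
        module BackgroundMigration
          module BackfillProjectStatisticsContainerRepositorySize
            def perform
              # EE-only logic: compute each project's container registry size
              # and write it to project_statistics.container_registry_size.
            end
          end
        end
      end
    end
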
diff --git a/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy.rb b/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy.rb
new file mode 100644
index 00000000000..9ad119310f7
--- /dev/null
+++ b/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ module BatchingStrategies
+ # Batching class to use for back-filling project_statistics' container_registry_size.
+ # Batches are scoped to records whose project_ids have already been migrated
+ #
+ # If no more batches exist in the table, returns nil.
+ class BackfillProjectStatisticsWithContainerRegistrySizeBatchingStrategy < PrimaryKeyBatchingStrategy
+ MIGRATION_PHASE_1_ENDED_AT = Date.new(2022, 01, 23).freeze
+
+ def apply_additional_filters(relation, job_arguments: [], job_class: nil)
+ relation.where(created_at: MIGRATION_PHASE_1_ENDED_AT..).or(
+ relation.where(migration_state: 'import_done')
+ ).select(:project_id).distinct
+ end
+ end
+ end
+ end
+end
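
The additional filter narrows each primary-key batch to distinct project_ids whose repositories either completed phase 1 of the registry migration or were created after it ended. Roughly the scope it produces, sketched against the model rather than the migration's generic relation (illustrative only):

    ContainerRepository
      .where(created_at: Date.new(2022, 1, 23)..)
      .or(ContainerRepository.where(migration_state: 'import_done'))
      .select(:project_id)
      .distinct
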
diff --git a/qa/qa/git/repository.rb b/qa/qa/git/repository.rb
index 01faa41a2ff..f132d7b7885 100644
--- a/qa/qa/git/repository.rb
+++ b/qa/qa/git/repository.rb
@@ -163,7 +163,7 @@ module QA
ssh
end
- env_vars << %(GIT_SSH_COMMAND="ssh -i #{ssh.private_key_file.path} -o UserKnownHostsFile=#{ssh.known_hosts_file.path}")
+ env_vars << %(GIT_SSH_COMMAND="ssh -i #{ssh.private_key_file.path} -o UserKnownHostsFile=#{ssh.known_hosts_file.path} -o IdentitiesOnly=yes")
end
def delete_ssh_key
diff --git a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
index b1b436e5443..83fb1764c6d 100644
--- a/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
+++ b/spec/frontend/runner/components/runner_filtered_search_bar_spec.js
@@ -89,6 +89,16 @@ describe('RunnerList', () => {
]);
});
+ it('can be configured with null or undefined tokens, which are ignored', () => {
+ createComponent({
+ props: {
+ tokens: [statusTokenConfig, null, undefined],
+ },
+ });
+
+ expect(findFilteredSearch().props('tokens')).toEqual([statusTokenConfig]);
+ });
+
it('fails validation for v-model with the wrong shape', () => {
expect(() => {
createComponent({ props: { value: { filters: 'wrong_filters', sort: 'sort' } } });
diff --git a/spec/frontend/runner/mock_data.js b/spec/frontend/runner/mock_data.js
index 3368fc21544..60a1a34bf95 100644
--- a/spec/frontend/runner/mock_data.js
+++ b/spec/frontend/runner/mock_data.js
@@ -17,7 +17,209 @@ import groupRunnersData from 'test_fixtures/graphql/runner/list/group_runners.qu
import groupRunnersDataPaginated from 'test_fixtures/graphql/runner/list/group_runners.query.graphql.paginated.json';
import groupRunnersCountData from 'test_fixtures/graphql/runner/list/group_runners_count.query.graphql.json';
+import { RUNNER_PAGE_SIZE } from '~/runner/constants';
+
// Other mock data
+
+// Mock searches and their corresponding urls
+export const mockSearchExamples = [
+ {
+ name: 'a default query',
+ urlQuery: '',
+ search: { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' },
+ graphqlVariables: { sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ isDefault: true,
+ },
+ {
+ name: 'a single status',
+ urlQuery: '?status[]=ACTIVE',
+ search: {
+ runnerType: null,
+ filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'a single term text search',
+ urlQuery: '?search=something',
+ search: {
+ runnerType: null,
+ filters: [
+ {
+ type: 'filtered-search-term',
+ value: { data: 'something' },
+ },
+ ],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { search: 'something', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'a two terms text search',
+ urlQuery: '?search=something+else',
+ search: {
+ runnerType: null,
+ filters: [
+ {
+ type: 'filtered-search-term',
+ value: { data: 'something' },
+ },
+ {
+ type: 'filtered-search-term',
+ value: { data: 'else' },
+ },
+ ],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { search: 'something else', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'single instance type',
+ urlQuery: '?runner_type[]=INSTANCE_TYPE',
+ search: {
+ runnerType: 'INSTANCE_TYPE',
+ filters: [],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { type: 'INSTANCE_TYPE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'multiple runner status',
+ urlQuery: '?status[]=ACTIVE&status[]=PAUSED',
+ search: {
+ runnerType: null,
+ filters: [
+ { type: 'status', value: { data: 'ACTIVE', operator: '=' } },
+ { type: 'status', value: { data: 'PAUSED', operator: '=' } },
+ ],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'multiple status, a single instance type and a non default sort',
+ urlQuery: '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&sort=CREATED_ASC',
+ search: {
+ runnerType: 'INSTANCE_TYPE',
+ filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_ASC',
+ },
+ graphqlVariables: {
+ status: 'ACTIVE',
+ type: 'INSTANCE_TYPE',
+ sort: 'CREATED_ASC',
+ first: RUNNER_PAGE_SIZE,
+ },
+ },
+ {
+ name: 'a tag',
+ urlQuery: '?tag[]=tag-1',
+ search: {
+ runnerType: null,
+ filters: [{ type: 'tag', value: { data: 'tag-1', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: {
+ tagList: ['tag-1'],
+ first: 20,
+ sort: 'CREATED_DESC',
+ },
+ },
+ {
+ name: 'two tags',
+ urlQuery: '?tag[]=tag-1&tag[]=tag-2',
+ search: {
+ runnerType: null,
+ filters: [
+ { type: 'tag', value: { data: 'tag-1', operator: '=' } },
+ { type: 'tag', value: { data: 'tag-2', operator: '=' } },
+ ],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: {
+ tagList: ['tag-1', 'tag-2'],
+ first: 20,
+ sort: 'CREATED_DESC',
+ },
+ },
+ {
+ name: 'the next page',
+ urlQuery: '?page=2&after=AFTER_CURSOR',
+ search: {
+ runnerType: null,
+ filters: [],
+ pagination: { page: 2, after: 'AFTER_CURSOR' },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { sort: 'CREATED_DESC', after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'the previous page',
+ urlQuery: '?page=2&before=BEFORE_CURSOR',
+ search: {
+ runnerType: null,
+ filters: [],
+ pagination: { page: 2, before: 'BEFORE_CURSOR' },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { sort: 'CREATED_DESC', before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'the next page filtered by a status, an instance type, tags and a non default sort',
+ urlQuery:
+ '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&page=2&after=AFTER_CURSOR',
+ search: {
+ runnerType: 'INSTANCE_TYPE',
+ filters: [
+ { type: 'status', value: { data: 'ACTIVE', operator: '=' } },
+ { type: 'tag', value: { data: 'tag-1', operator: '=' } },
+ { type: 'tag', value: { data: 'tag-2', operator: '=' } },
+ ],
+ pagination: { page: 2, after: 'AFTER_CURSOR' },
+ sort: 'CREATED_ASC',
+ },
+ graphqlVariables: {
+ status: 'ACTIVE',
+ type: 'INSTANCE_TYPE',
+ tagList: ['tag-1', 'tag-2'],
+ sort: 'CREATED_ASC',
+ after: 'AFTER_CURSOR',
+ first: RUNNER_PAGE_SIZE,
+ },
+ },
+ {
+ name: 'paused runners',
+ urlQuery: '?paused[]=true',
+ search: {
+ runnerType: null,
+ filters: [{ type: 'paused', value: { data: 'true', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { paused: true, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+ {
+ name: 'active runners',
+ urlQuery: '?paused[]=false',
+ search: {
+ runnerType: null,
+ filters: [{ type: 'paused', value: { data: 'false', operator: '=' } }],
+ pagination: { page: 1 },
+ sort: 'CREATED_DESC',
+ },
+ graphqlVariables: { paused: false, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
+ },
+];
+
export const onlineContactTimeoutSecs = 2 * 60 * 60;
export const staleTimeoutSecs = 7889238; // Ruby's `3.months`
diff --git a/spec/frontend/runner/runner_search_utils_spec.js b/spec/frontend/runner/runner_search_utils_spec.js
index 1f102f86b2a..23bf80cb875 100644
--- a/spec/frontend/runner/runner_search_utils_spec.js
+++ b/spec/frontend/runner/runner_search_utils_spec.js
@@ -1,4 +1,3 @@
-import { RUNNER_PAGE_SIZE } from '~/runner/constants';
import {
searchValidator,
updateOutdatedUrl,
@@ -7,208 +6,11 @@ import {
fromSearchToVariables,
isSearchFiltered,
} from '~/runner/runner_search_utils';
+import { mockSearchExamples } from './mock_data';
describe('search_params.js', () => {
- const examples = [
- {
- name: 'a default query',
- urlQuery: '',
- search: { runnerType: null, filters: [], pagination: { page: 1 }, sort: 'CREATED_DESC' },
- graphqlVariables: { sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- isDefault: true,
- },
- {
- name: 'a single status',
- urlQuery: '?status[]=ACTIVE',
- search: {
- runnerType: null,
- filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'a single term text search',
- urlQuery: '?search=something',
- search: {
- runnerType: null,
- filters: [
- {
- type: 'filtered-search-term',
- value: { data: 'something' },
- },
- ],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { search: 'something', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'a two terms text search',
- urlQuery: '?search=something+else',
- search: {
- runnerType: null,
- filters: [
- {
- type: 'filtered-search-term',
- value: { data: 'something' },
- },
- {
- type: 'filtered-search-term',
- value: { data: 'else' },
- },
- ],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { search: 'something else', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'single instance type',
- urlQuery: '?runner_type[]=INSTANCE_TYPE',
- search: {
- runnerType: 'INSTANCE_TYPE',
- filters: [],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { type: 'INSTANCE_TYPE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'multiple runner status',
- urlQuery: '?status[]=ACTIVE&status[]=PAUSED',
- search: {
- runnerType: null,
- filters: [
- { type: 'status', value: { data: 'ACTIVE', operator: '=' } },
- { type: 'status', value: { data: 'PAUSED', operator: '=' } },
- ],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { status: 'ACTIVE', sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'multiple status, a single instance type and a non default sort',
- urlQuery: '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&sort=CREATED_ASC',
- search: {
- runnerType: 'INSTANCE_TYPE',
- filters: [{ type: 'status', value: { data: 'ACTIVE', operator: '=' } }],
- pagination: { page: 1 },
- sort: 'CREATED_ASC',
- },
- graphqlVariables: {
- status: 'ACTIVE',
- type: 'INSTANCE_TYPE',
- sort: 'CREATED_ASC',
- first: RUNNER_PAGE_SIZE,
- },
- },
- {
- name: 'a tag',
- urlQuery: '?tag[]=tag-1',
- search: {
- runnerType: null,
- filters: [{ type: 'tag', value: { data: 'tag-1', operator: '=' } }],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: {
- tagList: ['tag-1'],
- first: 20,
- sort: 'CREATED_DESC',
- },
- },
- {
- name: 'two tags',
- urlQuery: '?tag[]=tag-1&tag[]=tag-2',
- search: {
- runnerType: null,
- filters: [
- { type: 'tag', value: { data: 'tag-1', operator: '=' } },
- { type: 'tag', value: { data: 'tag-2', operator: '=' } },
- ],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: {
- tagList: ['tag-1', 'tag-2'],
- first: 20,
- sort: 'CREATED_DESC',
- },
- },
- {
- name: 'the next page',
- urlQuery: '?page=2&after=AFTER_CURSOR',
- search: {
- runnerType: null,
- filters: [],
- pagination: { page: 2, after: 'AFTER_CURSOR' },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { sort: 'CREATED_DESC', after: 'AFTER_CURSOR', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'the previous page',
- urlQuery: '?page=2&before=BEFORE_CURSOR',
- search: {
- runnerType: null,
- filters: [],
- pagination: { page: 2, before: 'BEFORE_CURSOR' },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { sort: 'CREATED_DESC', before: 'BEFORE_CURSOR', last: RUNNER_PAGE_SIZE },
- },
- {
- name: 'the next page filtered by a status, an instance type, tags and a non default sort',
- urlQuery:
- '?status[]=ACTIVE&runner_type[]=INSTANCE_TYPE&tag[]=tag-1&tag[]=tag-2&sort=CREATED_ASC&page=2&after=AFTER_CURSOR',
- search: {
- runnerType: 'INSTANCE_TYPE',
- filters: [
- { type: 'status', value: { data: 'ACTIVE', operator: '=' } },
- { type: 'tag', value: { data: 'tag-1', operator: '=' } },
- { type: 'tag', value: { data: 'tag-2', operator: '=' } },
- ],
- pagination: { page: 2, after: 'AFTER_CURSOR' },
- sort: 'CREATED_ASC',
- },
- graphqlVariables: {
- status: 'ACTIVE',
- type: 'INSTANCE_TYPE',
- tagList: ['tag-1', 'tag-2'],
- sort: 'CREATED_ASC',
- after: 'AFTER_CURSOR',
- first: RUNNER_PAGE_SIZE,
- },
- },
- {
- name: 'paused runners',
- urlQuery: '?paused[]=true',
- search: {
- runnerType: null,
- filters: [{ type: 'paused', value: { data: 'true', operator: '=' } }],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { paused: true, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- {
- name: 'active runners',
- urlQuery: '?paused[]=false',
- search: {
- runnerType: null,
- filters: [{ type: 'paused', value: { data: 'false', operator: '=' } }],
- pagination: { page: 1 },
- sort: 'CREATED_DESC',
- },
- graphqlVariables: { paused: false, sort: 'CREATED_DESC', first: RUNNER_PAGE_SIZE },
- },
- ];
-
describe('searchValidator', () => {
- examples.forEach(({ name, search }) => {
+ mockSearchExamples.forEach(({ name, search }) => {
it(`Validates ${name} as a search object`, () => {
expect(searchValidator(search)).toBe(true);
});
@@ -235,7 +37,7 @@ describe('search_params.js', () => {
});
describe('fromUrlQueryToSearch', () => {
- examples.forEach(({ name, urlQuery, search }) => {
+ mockSearchExamples.forEach(({ name, urlQuery, search }) => {
it(`Converts ${name} to a search object`, () => {
expect(fromUrlQueryToSearch(urlQuery)).toEqual(search);
});
@@ -268,7 +70,7 @@ describe('search_params.js', () => {
});
describe('fromSearchToUrl', () => {
- examples.forEach(({ name, urlQuery, search }) => {
+ mockSearchExamples.forEach(({ name, urlQuery, search }) => {
it(`Converts ${name} to a url`, () => {
expect(fromSearchToUrl(search)).toBe(`http://test.host/${urlQuery}`);
});
@@ -295,7 +97,7 @@ describe('search_params.js', () => {
});
describe('fromSearchToVariables', () => {
- examples.forEach(({ name, graphqlVariables, search }) => {
+ mockSearchExamples.forEach(({ name, graphqlVariables, search }) => {
it(`Converts ${name} to a GraphQL query variables object`, () => {
expect(fromSearchToVariables(search)).toEqual(graphqlVariables);
});
@@ -335,7 +137,7 @@ describe('search_params.js', () => {
});
describe('isSearchFiltered', () => {
- examples.forEach(({ name, search, isDefault }) => {
+ mockSearchExamples.forEach(({ name, search, isDefault }) => {
it(`Given ${name}, evaluates to ${isDefault ? 'not ' : ''}filtered`, () => {
expect(isSearchFiltered(search)).toBe(!isDefault);
});
diff --git a/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy_spec.rb b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy_spec.rb
new file mode 100644
index 00000000000..94e9bcf9207
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy_spec.rb
@@ -0,0 +1,138 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BatchingStrategies::BackfillProjectStatisticsWithContainerRegistrySizeBatchingStrategy, '#next_batch' do # rubocop:disable Layout/LineLength
+ let(:batching_strategy) { described_class.new(connection: ActiveRecord::Base.connection) }
+ let(:namespace) { table(:namespaces) }
+ let(:project) { table(:projects) }
+ let(:container_repositories) { table(:container_repositories) }
+
+ let!(:group) do
+ namespace.create!(
+ name: 'namespace1', type: 'Group', path: 'space1'
+ )
+ end
+
+ let!(:proj_namespace1) do
+ namespace.create!(
+ name: 'proj1', path: 'proj1', type: 'Project', parent_id: group.id
+ )
+ end
+
+ let!(:proj_namespace2) do
+ namespace.create!(
+ name: 'proj2', path: 'proj2', type: 'Project', parent_id: group.id
+ )
+ end
+
+ let!(:proj_namespace3) do
+ namespace.create!(
+ name: 'proj3', path: 'proj3', type: 'Project', parent_id: group.id
+ )
+ end
+
+ let!(:proj1) do
+ project.create!(
+ name: 'proj1', path: 'proj1', namespace_id: group.id, project_namespace_id: proj_namespace1.id
+ )
+ end
+
+ let!(:proj2) do
+ project.create!(
+ name: 'proj2', path: 'proj2', namespace_id: group.id, project_namespace_id: proj_namespace2.id
+ )
+ end
+
+ let!(:proj3) do
+ project.create!(
+ name: 'proj3', path: 'proj3', namespace_id: group.id, project_namespace_id: proj_namespace3.id
+ )
+ end
+
+ let!(:con1) do
+ container_repositories.create!(
+ project_id: proj1.id,
+ name: "ContReg_#{proj1.id}:1",
+ migration_state: 'import_done',
+ created_at: Date.new(2022, 01, 20)
+ )
+ end
+
+ let!(:con2) do
+ container_repositories.create!(
+ project_id: proj1.id,
+ name: "ContReg_#{proj1.id}:2",
+ migration_state: 'import_done',
+ created_at: Date.new(2022, 01, 20)
+ )
+ end
+
+ let!(:con3) do
+ container_repositories.create!(
+ project_id: proj2.id,
+ name: "ContReg_#{proj2.id}:1",
+ migration_state: 'import_done',
+ created_at: Date.new(2022, 01, 20)
+ )
+ end
+
+ let!(:con4) do
+ container_repositories.create!(
+ project_id: proj3.id,
+ name: "ContReg_#{proj3.id}:1",
+ migration_state: 'default',
+ created_at: Date.new(2022, 02, 20)
+ )
+ end
+
+ let!(:con5) do
+ container_repositories.create!(
+ project_id: proj3.id,
+ name: "ContReg_#{proj3.id}:2",
+ migration_state: 'default',
+ created_at: Date.new(2022, 02, 20)
+ )
+ end
+
+ it { expect(described_class).to be < Gitlab::BackgroundMigration::BatchingStrategies::PrimaryKeyBatchingStrategy }
+
+ context 'when starting on the first batch' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy.next_batch(
+ :container_repositories,
+ :project_id,
+ batch_min_value: con1.project_id,
+ batch_size: 3,
+ job_arguments: []
+ )
+ expect(batch_bounds).to eq([con1.project_id, con4.project_id])
+ end
+ end
+
+ context 'when additional batches remain' do
+ it 'returns the bounds of the next batch' do
+ batch_bounds = batching_strategy.next_batch(
+ :container_repositories,
+ :project_id,
+ batch_min_value: con3.project_id,
+ batch_size: 3,
+ job_arguments: []
+ )
+
+ expect(batch_bounds).to eq([con3.project_id, con5.project_id])
+ end
+ end
+
+ context 'when no additional batches remain' do
+ it 'returns nil' do
+ batch_bounds = batching_strategy.next_batch(:container_repositories,
+ :project_id,
+ batch_min_value: con5.project_id + 1,
+ batch_size: 1, job_arguments: []
+ )
+
+ expect(batch_bounds).to be_nil
+ end
+ end
+end
diff --git a/spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb b/spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb
new file mode 100644
index 00000000000..52b75f0b8a9
--- /dev/null
+++ b/spec/migrations/20220622080547_backfill_project_statistics_with_container_registry_size_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe BackfillProjectStatisticsWithContainerRegistrySize do
+ let_it_be(:batched_migration) { described_class::MIGRATION_CLASS }
+
+ it 'does not schedule background jobs when Gitlab.com is false' do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ allow(Gitlab).to receive(:dev_or_test_env?).and_return(false)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+ end
+ end
+
+ it 'schedules background jobs for each batch of container_repository' do
+ allow(Gitlab).to receive(:com?).and_return(true)
+
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :container_repositories,
+ column_name: :project_id,
+ interval: described_class::DELAY_INTERVAL
+ )
+ }
+ end
+ end
+end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 9f864afc213..8ad2272a9ef 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe Issue do
it { is_expected.to belong_to(:milestone) }
it { is_expected.to belong_to(:iteration) }
it { is_expected.to belong_to(:project) }
- it { is_expected.to have_one(:namespace).through(:project) }
it { is_expected.to belong_to(:work_item_type).class_name('WorkItems::Type') }
it { is_expected.to belong_to(:moved_to).class_name('Issue') }
it { is_expected.to have_one(:moved_from).class_name('Issue') }
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index f4304056907..de478edf96a 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe Namespace do
it { is_expected.to have_one :namespace_route }
it { is_expected.to have_many :namespace_members }
it { is_expected.to have_one :cluster_enabled_grant }
+ it { is_expected.to have_many(:work_items) }
it do
is_expected.to have_one(:ci_cd_settings).class_name('NamespaceCiCdSetting').inverse_of(:namespace).autosave(true)
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index f3874155dd1..46129d3cc3f 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe WorkItem do
describe 'associations' do
+ it { is_expected.to belong_to(:namespace) }
it { is_expected.to have_one(:work_item_parent).class_name('WorkItem') }
it 'has one `parent_link`' do
diff --git a/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb b/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
index 32e7bdd563d..53116815ce7 100644
--- a/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
+++ b/spec/workers/namespaces/onboarding_issue_created_worker_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Namespaces::OnboardingIssueCreatedWorker, '#perform' do
let_it_be(:issue) { create(:issue) }
- let(:namespace) { issue.namespace }
+ let(:namespace) { issue.project.namespace }
it_behaves_like 'records an onboarding progress action', :issue_created do
subject { described_class.new.perform(namespace.id) }
diff --git a/workhorse/internal/api/api.go b/workhorse/internal/api/api.go
index d69161cac77..92eee2bb3cf 100644
--- a/workhorse/internal/api/api.go
+++ b/workhorse/internal/api/api.go
@@ -306,6 +306,7 @@ func (api *API) PreAuthorizeFixedPath(r *http.Request, method string, path strin
return nil, fmt.Errorf("construct auth request: %w", err)
}
authReq.Header = helper.HeaderClone(r.Header)
+ authReq.URL.RawQuery = r.URL.RawQuery
failureResponse, apiResponse, err := api.PreAuthorize(path, authReq)
if err != nil {
diff --git a/workhorse/internal/api/api_test.go b/workhorse/internal/api/api_test.go
index 346f32b4a36..e3457eb0f47 100644
--- a/workhorse/internal/api/api_test.go
+++ b/workhorse/internal/api/api_test.go
@@ -2,15 +2,15 @@ package api
import (
"fmt"
+ "io"
"net/http"
"net/http/httptest"
+ "net/url"
"regexp"
"testing"
"github.com/stretchr/testify/require"
- "gitlab.com/gitlab-org/labkit/log"
-
"gitlab.com/gitlab-org/gitlab/workhorse/internal/helper"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/secret"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/testhelper"
@@ -73,16 +73,39 @@ func testRailsServer(url *regexp.Regexp, code int, body string) *httptest.Server
w.Header().Set("Content-Type", ResponseContentType)
- logEntry := log.WithFields(log.Fields{
- "method": r.Method,
- "url": r.URL,
- })
- logEntryWithCode := logEntry.WithField("code", code)
-
- // Write pure string
- logEntryWithCode.Info("UPSTREAM")
-
w.WriteHeader(code)
fmt.Fprint(w, body)
})
}
+
+func TestPreAuthorizeFixedPath(t *testing.T) {
+ var (
+ upstreamHeaders http.Header
+ upstreamQuery url.Values
+ )
+
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if r.URL.Path != "/my/api/path" {
+ return
+ }
+
+ upstreamHeaders = r.Header
+ upstreamQuery = r.URL.Query()
+ w.Header().Set("Content-Type", ResponseContentType)
+ io.WriteString(w, `{"TempPath":"HELLO!!"}`)
+ }))
+ defer ts.Close()
+
+ req, err := http.NewRequest("GET", "/original/request/path?q1=Q1&q2=Q2", nil)
+ require.NoError(t, err)
+ req.Header.Set("key1", "value1")
+
+ api := NewAPI(helper.URLMustParse(ts.URL), "123", http.DefaultTransport)
+ resp, err := api.PreAuthorizeFixedPath(req, "POST", "/my/api/path")
+ require.NoError(t, err)
+
+ require.Equal(t, "value1", upstreamHeaders.Get("key1"), "original headers must propagate")
+ require.Equal(t, url.Values{"q1": []string{"Q1"}, "q2": []string{"Q2"}}, upstreamQuery,
+ "original query must propagate")
+ require.Equal(t, "HELLO!!", resp.TempPath, "sanity check: successful API call")
+}
diff --git a/workhorse/internal/upload/multipart_uploader.go b/workhorse/internal/upload/multipart_uploader.go
index 2456a2c8626..e55937186a6 100644
--- a/workhorse/internal/upload/multipart_uploader.go
+++ b/workhorse/internal/upload/multipart_uploader.go
@@ -19,23 +19,15 @@ func Multipart(rails PreAuthorizer, h http.Handler, p Preparer) http.Handler {
}, "/authorize")
}
-// SkipRailsPreAuthMultipart behaves like Multipart except it does not
-// pre-authorize with Rails. It is intended for use on catch-all routes
-// where we cannot pre-authorize both because we don't know which Rails
-// endpoint to call, and because eagerly pre-authorizing would add too
-// much overhead.
-func SkipRailsPreAuthMultipart(tempPath string, myAPI *api.API, h http.Handler, p Preparer) http.Handler {
+// FixedPreAuthMultipart behaves like Multipart except it makes lazy
+// preauthorization requests when it encounters a multipart upload. The
+// preauthorization requests go to a fixed internal GitLab Rails API
+// endpoint. This endpoint currently does not support direct upload, so
+// using FixedPreAuthMultipart implies disk buffering.
+func FixedPreAuthMultipart(myAPI *api.API, h http.Handler, p Preparer) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
s := &SavedFileTracker{Request: r}
-
- // We use testAuthorizer as a temporary measure. When
- // https://gitlab.com/groups/gitlab-com/gl-infra/-/epics/742 is done, we
- // should only be using apiAuthorizer.
- fa := &testAuthorizer{
- test: &apiAuthorizer{myAPI},
- actual: &eagerAuthorizer{&api.Response{TempPath: tempPath}},
- }
-
+ fa := &apiAuthorizer{myAPI}
interceptMultipartFiles(w, r, h, s, fa, p)
})
}
diff --git a/workhorse/internal/upload/rewrite.go b/workhorse/internal/upload/rewrite.go
index d03445923fa..c5af441a373 100644
--- a/workhorse/internal/upload/rewrite.go
+++ b/workhorse/internal/upload/rewrite.go
@@ -15,8 +15,6 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
- "gitlab.com/gitlab-org/gitlab/workhorse/internal/log"
-
"gitlab.com/gitlab-org/gitlab/workhorse/internal/api"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/destination"
"gitlab.com/gitlab-org/gitlab/workhorse/internal/upload/exif"
@@ -222,21 +220,3 @@ func (aa *apiAuthorizer) AuthorizeFile(r *http.Request) (*api.Response, error) {
}
var _ fileAuthorizer = &apiAuthorizer{}
-
-type testAuthorizer struct {
- test fileAuthorizer
- actual fileAuthorizer
-}
-
-func (ta *testAuthorizer) AuthorizeFile(r *http.Request) (*api.Response, error) {
- logger := log.WithRequest(r)
- if response, err := ta.test.AuthorizeFile(r); err != nil {
- logger.WithError(err).Error("test api preauthorize request failed")
- } else {
- logger.WithFields(log.Fields{
- "temp_path": response.TempPath,
- }).Info("test api preauthorize request")
- }
-
- return ta.actual.AuthorizeFile(r)
-}
diff --git a/workhorse/internal/upstream/routes.go b/workhorse/internal/upstream/routes.go
index 95c9b99b833..c889f87ed96 100644
--- a/workhorse/internal/upstream/routes.go
+++ b/workhorse/internal/upstream/routes.go
@@ -3,7 +3,6 @@ package upstream
import (
"net/http"
"net/url"
- "path"
"regexp"
"github.com/gorilla/websocket"
@@ -222,8 +221,7 @@ func configureRoutes(u *upstream) {
requestBodyUploader := upload.RequestBody(api, signingProxy, preparer)
mimeMultipartUploader := upload.Multipart(api, signingProxy, preparer)
- uploadPath := path.Join(u.DocumentRoot, "uploads/tmp")
- tempfileMultipartProxy := upload.SkipRailsPreAuthMultipart(uploadPath, api, proxy, preparer)
+ tempfileMultipartProxy := upload.FixedPreAuthMultipart(api, proxy, preparer)
ciAPIProxyQueue := queueing.QueueRequests("ci_api_job_requests", tempfileMultipartProxy, u.APILimit, u.APIQueueLimit, u.APIQueueTimeout)
ciAPILongPolling := builds.RegisterHandler(ciAPIProxyQueue, redis.WatchKey, u.APICILongPollingDuration)
diff --git a/workhorse/upload_test.go b/workhorse/upload_test.go
index dedda4ea655..9a6b3f8bfba 100644
--- a/workhorse/upload_test.go
+++ b/workhorse/upload_test.go
@@ -287,30 +287,38 @@ func TestBlockingRewrittenFieldsHeader(t *testing.T) {
}
for _, tc := range testCases {
- ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
- key := upload.RewrittenFieldsHeader
- if tc.present && r.URL.Path != "/api/v4/internal/workhorse/authorize_upload" {
- require.Contains(t, r.Header, key)
- } else {
- require.NotContains(t, r.Header, key)
- }
+ t.Run(tc.desc, func(t *testing.T) {
+ ts := testhelper.TestServerWithHandler(regexp.MustCompile(`.`), func(w http.ResponseWriter, r *http.Request) {
+ switch r.URL.Path {
+ case "/api/v4/internal/workhorse/authorize_upload":
+ w.Header().Set("Content-Type", api.ResponseContentType)
+ io.WriteString(w, `{"TempPath":"`+os.TempDir()+`"}`)
+ default:
+ if tc.present {
+ require.Contains(t, r.Header, upload.RewrittenFieldsHeader)
+ } else {
+ require.NotContains(t, r.Header, upload.RewrittenFieldsHeader)
- require.NotEqual(t, canary, r.Header.Get(key), "Found canary %q in header %q", canary, key)
- })
- defer ts.Close()
- ws := startWorkhorseServer(ts.URL)
- defer ws.Close()
+ }
+ }
+
+ require.NotEqual(t, canary, r.Header.Get(upload.RewrittenFieldsHeader), "Found canary %q in header", canary)
+ })
+ defer ts.Close()
+ ws := startWorkhorseServer(ts.URL)
+ defer ws.Close()
- req, err := http.NewRequest("POST", ws.URL+"/something", tc.body)
- require.NoError(t, err)
+ req, err := http.NewRequest("POST", ws.URL+"/something", tc.body)
+ require.NoError(t, err)
- req.Header.Set("Content-Type", tc.contentType)
- req.Header.Set(upload.RewrittenFieldsHeader, canary)
- resp, err := http.DefaultClient.Do(req)
- require.NoError(t, err)
- defer resp.Body.Close()
+ req.Header.Set("Content-Type", tc.contentType)
+ req.Header.Set(upload.RewrittenFieldsHeader, canary)
+ resp, err := http.DefaultClient.Do(req)
+ require.NoError(t, err)
+ defer resp.Body.Close()
- require.Equal(t, 200, resp.StatusCode, "status code")
+ require.Equal(t, 200, resp.StatusCode, "status code")
+ })
}
}