
gitlab.com/gitlab-org/gitlab-foss.git
-rw-r--r--  .rubocop_manual_todo.yml | 2
-rw-r--r--  Gemfile | 2
-rw-r--r--  Gemfile.lock | 6
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/components/approvals/approvals_summary.vue | 74
-rw-r--r--  app/assets/javascripts/vue_merge_request_widget/components/approvals/messages.js | 4
-rw-r--r--  app/helpers/icons_helper.rb | 4
-rw-r--r--  app/services/ci/retry_build_service.rb | 12
-rw-r--r--  app/views/layouts/_head.html.haml | 2
-rw-r--r--  config/feature_flags/development/sticky_environments_in_job_retry.yml | 8
-rw-r--r--  db/post_migrate/20210824174615_prepare_ci_builds_metadata_and_ci_build_async_indexes.rb | 2
-rw-r--r--  doc/user/discussions/index.md | 34
-rw-r--r--  doc/user/group/iterations/index.md | 2
-rw-r--r--  doc/user/permissions.md | 9
-rw-r--r--  doc/user/project/integrations/webhook_events.md | 5
-rw-r--r--  doc/user/project/settings/import_export.md | 65
-rw-r--r--  lib/gitlab/ci/pipeline/seed/build.rb | 10
-rw-r--r--  lib/gitlab/ci/pipeline/seed/stage.rb | 2
-rw-r--r--  lib/gitlab/database/async_indexes/index_creator.rb | 2
-rw-r--r--  lib/gitlab/database/async_indexes/postgres_async_index.rb | 2
-rw-r--r--  lib/gitlab/database/postgres_index.rb | 2
-rw-r--r--  lib/gitlab/database/postgres_index_bloat_estimate.rb | 2
-rw-r--r--  lib/gitlab/database/reindexing.rb | 3
-rw-r--r--  lib/gitlab/database/reindexing/reindex_action.rb | 2
-rw-r--r--  lib/gitlab/database/reindexing/reindex_concurrently.rb | 8
-rw-r--r--  lib/tasks/gitlab/db.rake | 28
-rw-r--r--  locale/gitlab.pot | 9
-rw-r--r--  qa/Gemfile.lock | 2
-rw-r--r--  qa/lib/gitlab/page/admin/subscription.rb | 9
-rw-r--r--  qa/lib/gitlab/page/admin/subscription.stub.rb | 216
-rw-r--r--  qa/qa/support/helpers/plan.rb | 66
-rw-r--r--  spec/features/merge_request/user_approves_spec.rb | 2
-rw-r--r--  spec/frontend/repository/components/blob_content_viewer_spec.js | 380
-rw-r--r--  spec/frontend/repository/mock_data.js | 56
-rw-r--r--  spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js | 53
-rw-r--r--  spec/lib/gitlab/ci/pipeline/seed/build_spec.rb | 10
-rw-r--r--  spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/postgres_index_bloat_estimate_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/postgres_index_spec.rb | 2
-rw-r--r--  spec/lib/gitlab/database/reindexing/reindex_action_spec.rb | 2
-rw-r--r--  spec/services/ci/retry_build_service_spec.rb | 47
-rw-r--r--  spec/tasks/gitlab/db_rake_spec.rb | 18
41 files changed, 787 insertions(+), 381 deletions(-)
diff --git a/.rubocop_manual_todo.yml b/.rubocop_manual_todo.yml
index cf90d98340b..d5b8b0ad340 100644
--- a/.rubocop_manual_todo.yml
+++ b/.rubocop_manual_todo.yml
@@ -2449,8 +2449,6 @@ Database/MultipleDatabases:
- 'lib/gitlab/database/postgresql_adapter/dump_schema_versions_mixin.rb'
- 'lib/gitlab/database/postgresql_database_tasks/load_schema_versions_mixin.rb'
- 'lib/gitlab/database.rb'
- - 'lib/gitlab/database/reindexing/concurrent_reindex.rb'
- - 'lib/gitlab/database/reindexing/reindex_concurrently.rb'
- 'lib/gitlab/database/schema_cache_with_renamed_table.rb'
- 'lib/gitlab/database/schema_migrations/context.rb'
- 'lib/gitlab/database/schema_version_files.rb'
diff --git a/Gemfile b/Gemfile
index e2b93f37d74..495169d4854 100644
--- a/Gemfile
+++ b/Gemfile
@@ -399,7 +399,7 @@ group :development, :test do
end
group :development, :test, :danger do
- gem 'gitlab-dangerfiles', '~> 2.3.1', require: false
+ gem 'gitlab-dangerfiles', '~> 2.3.2', require: false
end
group :development, :test, :coverage do
diff --git a/Gemfile.lock b/Gemfile.lock
index e0257c136db..0e964edab9e 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -222,7 +222,7 @@ GEM
css_parser (1.7.0)
addressable
daemons (1.3.1)
- danger (8.3.1)
+ danger (8.4.1)
claide (~> 1.0)
claide-plugins (>= 0.9.2)
colored2 (~> 3.1)
@@ -458,7 +458,7 @@ GEM
terminal-table (~> 1.5, >= 1.5.1)
gitlab-chronic (0.10.5)
numerizer (~> 0.2)
- gitlab-dangerfiles (2.3.1)
+ gitlab-dangerfiles (2.3.2)
danger (>= 8.3.1)
danger-gitlab (>= 8.0.0)
gitlab-experiment (0.6.4)
@@ -1461,7 +1461,7 @@ DEPENDENCIES
gitaly (~> 14.4.0.pre.rc43)
github-markup (~> 1.7.0)
gitlab-chronic (~> 0.10.5)
- gitlab-dangerfiles (~> 2.3.1)
+ gitlab-dangerfiles (~> 2.3.2)
gitlab-experiment (~> 0.6.4)
gitlab-fog-azure-rm (~> 1.2.0)
gitlab-labkit (~> 0.21.1)
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/approvals/approvals_summary.vue b/app/assets/javascripts/vue_merge_request_widget/components/approvals/approvals_summary.vue
index 0c4a5ee35d9..25dbb614c1d 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/approvals/approvals_summary.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/components/approvals/approvals_summary.vue
@@ -1,7 +1,11 @@
<script>
import { toNounSeriesText } from '~/lib/utils/grammar';
import { n__, sprintf } from '~/locale';
-import { APPROVED_MESSAGE } from '~/vue_merge_request_widget/components/approvals/messages';
+import {
+ APPROVED_BY_YOU_AND_OTHERS,
+ APPROVED_BY_YOU,
+ APPROVED_BY_OTHERS,
+} from '~/vue_merge_request_widget/components/approvals/messages';
import UserAvatarList from '~/vue_shared/components/user_avatar/user_avatar_list.vue';
export default {
@@ -29,12 +33,23 @@ export default {
},
},
computed: {
- message() {
- if (this.approved) {
- return APPROVED_MESSAGE;
+ approvalLeftMessage() {
+ if (this.rulesLeft.length) {
+ return sprintf(
+ n__(
+ 'Requires %{count} approval from %{names}.',
+ 'Requires %{count} approvals from %{names}.',
+ this.approvalsLeft,
+ ),
+ {
+ names: toNounSeriesText(this.rulesLeft),
+ count: this.approvalsLeft,
+ },
+ false,
+ );
}
- if (!this.rulesLeft.length) {
+ if (!this.approved) {
return n__(
'Requires %d approval from eligible users.',
'Requires %d approvals from eligible users.',
@@ -42,32 +57,51 @@ export default {
);
}
- return sprintf(
- n__(
- 'Requires %{count} approval from %{names}.',
- 'Requires %{count} approvals from %{names}.',
- this.approvalsLeft,
- ),
- {
- names: toNounSeriesText(this.rulesLeft),
- count: this.approvalsLeft,
- },
- false,
- );
+ return '';
+ },
+ message() {
+ if (this.approvedByMe && this.approvedByOthers) {
+ return APPROVED_BY_YOU_AND_OTHERS;
+ }
+
+ if (this.approvedByMe) {
+ return APPROVED_BY_YOU;
+ }
+
+ if (this.approved) {
+ return APPROVED_BY_OTHERS;
+ }
+
+ return '';
},
hasApprovers() {
return Boolean(this.approvers.length);
},
+ approvedByMe() {
+ if (!this.currentUserId) {
+ return false;
+ }
+ return this.approvers.some((approver) => approver.id === this.currentUserId);
+ },
+ approvedByOthers() {
+ if (!this.currentUserId) {
+ return false;
+ }
+ return this.approvers.some((approver) => approver.id !== this.currentUserId);
+ },
+ currentUserId() {
+ return gon.current_user_id;
+ },
},
- APPROVED_MESSAGE,
};
</script>
<template>
<div data-qa-selector="approvals_summary_content">
- <strong>{{ message }}</strong>
+ <strong>{{ approvalLeftMessage }}</strong>
<template v-if="hasApprovers">
- <span>{{ s__('mrWidget|Approved by') }}</span>
+ <span v-if="approvalLeftMessage">{{ message }}</span>
+ <strong v-else>{{ message }}</strong>
<user-avatar-list class="d-inline-block align-middle" :items="approvers" />
</template>
</div>
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/approvals/messages.js b/app/assets/javascripts/vue_merge_request_widget/components/approvals/messages.js
index 0538c38307b..fbdefa95630 100644
--- a/app/assets/javascripts/vue_merge_request_widget/components/approvals/messages.js
+++ b/app/assets/javascripts/vue_merge_request_widget/components/approvals/messages.js
@@ -6,4 +6,6 @@ export const FETCH_ERROR = s__(
);
export const APPROVE_ERROR = s__('mrWidget|An error occurred while submitting your approval.');
export const UNAPPROVE_ERROR = s__('mrWidget|An error occurred while removing your approval.');
-export const APPROVED_MESSAGE = s__('mrWidget|Merge request approved.');
+export const APPROVED_BY_YOU_AND_OTHERS = s__('mrWidget|Approved by you and others');
+export const APPROVED_BY_YOU = s__('mrWidget|Approved by you');
+export const APPROVED_BY_OTHERS = s__('mrWidget|Approved by');
diff --git a/app/helpers/icons_helper.rb b/app/helpers/icons_helper.rb
index c38b4a7aedf..32d808c960c 100644
--- a/app/helpers/icons_helper.rb
+++ b/app/helpers/icons_helper.rb
@@ -9,9 +9,7 @@ module IconsHelper
def custom_icon(icon_name, size: DEFAULT_ICON_SIZE)
memoized_icon("#{icon_name}_#{size}") do
- # We can't simply do the below, because there are some .erb SVGs.
- # File.read(Rails.root.join("app/views/shared/icons/_#{icon_name}.svg")).html_safe
- render "shared/icons/#{icon_name}.svg", size: size
+ render partial: "shared/icons/#{icon_name}", formats: :svg, locals: { size: size }
end
end
diff --git a/app/services/ci/retry_build_service.rb b/app/services/ci/retry_build_service.rb
index 07cfbb9ce3c..b142dde21b0 100644
--- a/app/services/ci/retry_build_service.rb
+++ b/app/services/ci/retry_build_service.rb
@@ -63,7 +63,7 @@ module Ci
def clone_build(build)
project.builds.new(build_attributes(build)).tap do |new_build|
- new_build.assign_attributes(::Gitlab::Ci::Pipeline::Seed::Build.environment_attributes_for(new_build))
+ new_build.assign_attributes(deployment_attributes_for(new_build, build))
end
end
@@ -75,6 +75,16 @@ module Ci
attributes[:user] = current_user
attributes
end
+
+ def deployment_attributes_for(new_build, old_build)
+ if Feature.enabled?(:sticky_environments_in_job_retry, project, default_enabled: :yaml)
+ ::Gitlab::Ci::Pipeline::Seed::Build
+ .deployment_attributes_for(new_build, old_build.persisted_environment)
+ else
+ ::Gitlab::Ci::Pipeline::Seed::Build
+ .deployment_attributes_for(new_build)
+ end
+ end
end
end
diff --git a/app/views/layouts/_head.html.haml b/app/views/layouts/_head.html.haml
index a89c621a55c..5ca4a2f9888 100644
--- a/app/views/layouts/_head.html.haml
+++ b/app/views/layouts/_head.html.haml
@@ -92,3 +92,5 @@
= render 'layouts/google_analytics' if extra_config.has_key?('google_analytics_id')
= render 'layouts/matomo' if extra_config.has_key?('matomo_url') && extra_config.has_key?('matomo_site_id')
= render 'layouts/snowplow'
+ -# This is needed by [GitLab JH](https://gitlab.com/gitlab-jh/gitlab/-/issues/184)
+ = render_if_exists "layouts/frontend_monitor"
diff --git a/config/feature_flags/development/sticky_environments_in_job_retry.yml b/config/feature_flags/development/sticky_environments_in_job_retry.yml
new file mode 100644
index 00000000000..d61178c4b38
--- /dev/null
+++ b/config/feature_flags/development/sticky_environments_in_job_retry.yml
@@ -0,0 +1,8 @@
+---
+name: sticky_environments_in_job_retry
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/72970
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/343874
+milestone: '14.5'
+type: development
+group: group::release
+default_enabled: false
diff --git a/db/post_migrate/20210824174615_prepare_ci_builds_metadata_and_ci_build_async_indexes.rb b/db/post_migrate/20210824174615_prepare_ci_builds_metadata_and_ci_build_async_indexes.rb
index 0a0fda7e870..f63645b4ffa 100644
--- a/db/post_migrate/20210824174615_prepare_ci_builds_metadata_and_ci_build_async_indexes.rb
+++ b/db/post_migrate/20210824174615_prepare_ci_builds_metadata_and_ci_build_async_indexes.rb
@@ -42,7 +42,7 @@ class PrepareCiBuildsMetadataAndCiBuildAsyncIndexes < ActiveRecord::Migration[6.
return if index_name_exists?(table_name, index_name)
- async_index = Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.safe_find_or_create_by!(name: index_name) do |rec|
+ async_index = Gitlab::Database::AsyncIndexes::PostgresAsyncIndex.find_or_create_by!(name: index_name) do |rec|
rec.table_name = table_name
rec.definition = definition
end
diff --git a/doc/user/discussions/index.md b/doc/user/discussions/index.md
index 919d5bad6e2..c0227cca94d 100644
--- a/doc/user/discussions/index.md
+++ b/doc/user/discussions/index.md
@@ -119,16 +119,15 @@ Notes are added to the page details.
If an issue or merge request is locked and closed, you cannot reopen it.
-## Mark a comment as confidential
+## Mark a comment as confidential **(FREE SELF)**
-> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/207473) in GitLab 13.9.
-> - [Deployed behind a feature flag](../feature_flags.md), disabled by default.
-> - Disabled on GitLab.com.
-> - Not recommended for production use.
-> - To use in GitLab self-managed instances, ask a GitLab administrator to enable it.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/207473) in GitLab 13.9 [with a flag](../../administration/feature_flags.md) named `confidential_notes`. Disabled by default.
-WARNING:
-This feature might not be available to you. Check the **version history** note above for details.
+FLAG:
+On self-managed GitLab, by default this feature is not available. To make it available,
+ask an administrator to [enable the feature flag](../../administration/feature_flags.md) named `confidential_notes`.
+On GitLab.com, this feature is not available.
+You should not use this feature for production environments.
You can make a comment confidential, so that it is visible only to project members
who have at least the Reporter role.
@@ -286,22 +285,3 @@ with a new push.
Threads are now resolved if a push makes a diff section outdated.
Threads on lines that don't change and top-level resolvable threads are not resolved.
-
-## Enable or disable confidential comments **(FREE SELF)**
-
-Confidential comments are under development and not ready for production use. The feature is
-deployed behind a feature flag that is **disabled by default**.
-[GitLab administrators with access to the GitLab Rails console](../../administration/feature_flags.md)
-can enable it.
-
-To enable it:
-
-```ruby
-Feature.enable(:confidential_notes)
-```
-
-To disable it:
-
-```ruby
-Feature.disable(:confidential_notes)
-```
diff --git a/doc/user/group/iterations/index.md b/doc/user/group/iterations/index.md
index 70fa3ba639d..c0331f655e1 100644
--- a/doc/user/group/iterations/index.md
+++ b/doc/user/group/iterations/index.md
@@ -144,7 +144,7 @@ To view an iteration report, go to the iterations list page and select an iterat
### Iteration burndown and burnup charts
-> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/222750) in GitLab 13.5.
+> - [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/222750) in GitLab 13.6.
> - [Feature flag removed](https://gitlab.com/gitlab-org/gitlab/-/issues/269972) in GitLab 13.7.
The iteration report includes [burndown and burnup charts](../../project/milestones/burndown_and_burnup_charts.md),
diff --git a/doc/user/permissions.md b/doc/user/permissions.md
index ea3577f7957..82eab4634cf 100644
--- a/doc/user/permissions.md
+++ b/doc/user/permissions.md
@@ -81,6 +81,15 @@ The following table lists project permissions available for each role:
| [GitLab Pages](project/pages/index.md):<br>Manage | | | | ✓ | ✓ |
| [GitLab Pages](project/pages/index.md):<br>Manage GitLab Pages domains and certificates | | | | ✓ | ✓ |
| [GitLab Pages](project/pages/index.md):<br>Remove GitLab Pages | | | | ✓ | ✓ |
+| [Incident Management](../operations/incident_management/index.md):<br>View [alerts](../operations/incident_management/alerts.md) | | ✓ | ✓ | ✓ | ✓ |
+| [Incident Management](../operations/incident_management/index.md):<br>Assign an alert | ✓ | ✓ | ✓ | ✓ | ✓ |
+| [Incident Management](../operations/incident_management/index.md):<br>View [incident](../operations/incident_management/incidents.md) | ✓ | ✓ | ✓ | ✓ | ✓ |
+| [Incident Management](../operations/incident_management/index.md):<br>Create [incident](../operations/incident_management/incidents.md) | ✓ | ✓ | ✓ | ✓ | ✓ |
+| [Incident Management](../operations/incident_management/index.md):<br>View [on-call schedules](../operations/incident_management/oncall_schedules.md) | | ✓ | ✓ | ✓ | ✓ |
+| [Incident Management](../operations/incident_management/index.md):<br>Participate in on-call rotation | ✓ | ✓ | ✓ | ✓ | ✓ |
+| [Incident Management](../operations/incident_management/index.md):<br>View [escalation policies](../operations/incident_management/escalation_policies.md) | | ✓ | ✓ | ✓ | ✓ |
+| [Incident Management](../operations/incident_management/index.md):<br>Manage [on-call schedules](../operations/incident_management/oncall_schedules.md) | | | | ✓ | ✓ |
+| [Incident Management](../operations/incident_management/index.md):<br>Manage [escalation policies](../operations/incident_management/escalation_policies.md) | | | | ✓ | ✓ |
| [Issues](project/issues/index.md):<br>Add Labels | ✓ (*16*) | ✓ | ✓ | ✓ | ✓ |
| [Issues](project/issues/index.md):<br>Assign | ✓ (*16*) | ✓ | ✓ | ✓ | ✓ |
| [Issues](project/issues/index.md):<br>Create | ✓ | ✓ | ✓ | ✓ | ✓ |
diff --git a/doc/user/project/integrations/webhook_events.md b/doc/user/project/integrations/webhook_events.md
index f5010629b82..dacf91c78b4 100644
--- a/doc/user/project/integrations/webhook_events.md
+++ b/doc/user/project/integrations/webhook_events.md
@@ -1684,3 +1684,8 @@ Payload example:
}
}
```
+
+NOTE:
+If an author has no public email listed in their
+[GitLab profile](https://gitlab.com/-/profile), the `email` attribute displays
+a value of `["REDACTED"]`.
diff --git a/doc/user/project/settings/import_export.md b/doc/user/project/settings/import_export.md
index 3b393eaa151..c0e08987998 100644
--- a/doc/user/project/settings/import_export.md
+++ b/doc/user/project/settings/import_export.md
@@ -235,13 +235,16 @@ Review [issue 276930](https://gitlab.com/gitlab-org/gitlab/-/issues/276930), and
- Ensure shared runners are enabled in both the source and destination projects.
- Disable shared runners on the parent group when you import the project.
-### Import workaround for large repositories
+### Import workarounds for large repositories
[Maximum import size limitations](#import-the-project)
-can prevent an import from being successful.
-If changing the import limits is not possible,
-the following local workflow can be used to temporarily
-reduce the repository size for another import attempt.
+can prevent an import from being successful. If changing the import limits is not possible, you can
+try one of the workarounds listed here.
+
+#### Workaround option 1
+
+The following local workflow can be used to temporarily
+reduce the repository size for another import attempt:
1. Create a temporary working directory from the export:
@@ -291,6 +294,58 @@ reduce the repository size for another import attempt.
delete the temporary, `smaller-tmp-main` branch, and
the local, temporary data.
+#### Workaround option 2
+
+Rather than attempting to push all changes at once, this workaround:
+
+- Separates the project import from the Git Repository import
+- Incrementally pushes the repository to GitLab
+
+1. Make a local clone of the repository to migrate. In a later step, you push this clone outside of
+ the project export.
+1. Download the export and remove the `project.bundle` (which contains the Git repository):
+
+ ```shell
+ tar -czvf new_export.tar.gz --exclude='project.bundle' @old_export.tar.gz
+ ```
+
+1. Import the export without a Git repository. When prompted, confirm that you want to import
+   without a repository.
+1. Save this bash script as a file and run it after adding the appropriate origin.
+
+ ```shell
+ #!/bin/sh
+
+ # ASSUMPTIONS:
+ # - The GitLab location is "origin"
+ # - The default branch is "main"
+ # - This will attempt to push in chunks of 500MB (dividing the total size by 500MB).
+ # Decrease this size to push in smaller chunks if you still receive timeouts.
+
+ git gc
+ SIZE=$(git count-objects -v 2> /dev/null | grep size-pack | awk '{print $2}')
+
+ # Be conservative... and try to push about 500MB at a time
+ # (this assumes each commit is roughly the same size, which is rarely true)
+ BATCHES=$(($SIZE / 500000))
+ TOTAL_COMMITS=$(git rev-list --count HEAD)
+ if (( BATCHES > TOTAL_COMMITS )); then
+ BATCHES=$TOTAL_COMMITS
+ fi
+
+ INCREMENTS=$(( ($TOTAL_COMMITS / $BATCHES) - 1 ))
+
+ for (( BATCH=BATCHES; BATCH>=1; BATCH-- ))
+ do
+ COMMIT_NUM=$(( $BATCH * $INCREMENTS ))
+ COMMIT_SHA=$(git log -n $COMMIT_NUM --format=format:%H | tail -1)
+ git push -u origin ${COMMIT_SHA}:refs/heads/main
+ done
+ git push -u origin main
+ git push -u origin --all
+ git push -u origin --tags
+ ```
+
### Manually execute export steps
Exports sometimes fail without giving enough information to troubleshoot. In these cases, it can be
diff --git a/lib/gitlab/ci/pipeline/seed/build.rb b/lib/gitlab/ci/pipeline/seed/build.rb
index 9ad5d6538b7..f223d1d4b4e 100644
--- a/lib/gitlab/ci/pipeline/seed/build.rb
+++ b/lib/gitlab/ci/pipeline/seed/build.rb
@@ -11,11 +11,11 @@ module Gitlab
delegate :dig, to: :@seed_attributes
- def initialize(context, attributes, previous_stages, current_stage)
+ def initialize(context, attributes, stages_for_needs_lookup = [])
@context = context
@pipeline = context.pipeline
@seed_attributes = attributes
- @stages_for_needs_lookup = (previous_stages + [current_stage]).compact
+ @stages_for_needs_lookup = stages_for_needs_lookup.compact
@needs_attributes = dig(:needs_attributes)
@resource_group_key = attributes.delete(:resource_group_key)
@job_variables = @seed_attributes.delete(:job_variables)
@@ -90,7 +90,7 @@ module Gitlab
::Ci::Bridge.new(attributes)
else
::Ci::Build.new(attributes).tap do |build|
- build.assign_attributes(self.class.environment_attributes_for(build))
+ build.assign_attributes(self.class.deployment_attributes_for(build))
end
end
end
@@ -101,10 +101,10 @@ module Gitlab
.to_resource
end
- def self.environment_attributes_for(build)
+ def self.deployment_attributes_for(build, environment = nil)
return {} unless build.has_environment?
- environment = Seed::Environment.new(build).to_resource
+ environment = Seed::Environment.new(build).to_resource if environment.nil?
unless environment.persisted?
if Feature.enabled?(:surface_environment_creation_failure, build.project, default_enabled: :yaml) &&
diff --git a/lib/gitlab/ci/pipeline/seed/stage.rb b/lib/gitlab/ci/pipeline/seed/stage.rb
index 018fb260986..bc56fe9bef9 100644
--- a/lib/gitlab/ci/pipeline/seed/stage.rb
+++ b/lib/gitlab/ci/pipeline/seed/stage.rb
@@ -17,7 +17,7 @@ module Gitlab
@previous_stages = previous_stages
@builds = attributes.fetch(:builds).map do |attributes|
- Seed::Build.new(context, attributes, previous_stages, self)
+ Seed::Build.new(context, attributes, previous_stages + [self])
end
end
diff --git a/lib/gitlab/database/async_indexes/index_creator.rb b/lib/gitlab/database/async_indexes/index_creator.rb
index 00de79ec970..994a1deba57 100644
--- a/lib/gitlab/database/async_indexes/index_creator.rb
+++ b/lib/gitlab/database/async_indexes/index_creator.rb
@@ -40,7 +40,7 @@ module Gitlab
end
def connection
- @connection ||= ApplicationRecord.connection
+ @connection ||= async_index.connection
end
def lease_timeout
diff --git a/lib/gitlab/database/async_indexes/postgres_async_index.rb b/lib/gitlab/database/async_indexes/postgres_async_index.rb
index 236459e6216..6cb40729061 100644
--- a/lib/gitlab/database/async_indexes/postgres_async_index.rb
+++ b/lib/gitlab/database/async_indexes/postgres_async_index.rb
@@ -3,7 +3,7 @@
module Gitlab
module Database
module AsyncIndexes
- class PostgresAsyncIndex < ApplicationRecord
+ class PostgresAsyncIndex < SharedModel
self.table_name = 'postgres_async_indexes'
MAX_IDENTIFIER_LENGTH = Gitlab::Database::MigrationHelpers::MAX_IDENTIFIER_NAME_LENGTH
diff --git a/lib/gitlab/database/postgres_index.rb b/lib/gitlab/database/postgres_index.rb
index 1079bfdeda3..4d938686ccf 100644
--- a/lib/gitlab/database/postgres_index.rb
+++ b/lib/gitlab/database/postgres_index.rb
@@ -2,7 +2,7 @@
module Gitlab
module Database
- class PostgresIndex < ActiveRecord::Base
+ class PostgresIndex < SharedModel
include Gitlab::Utils::StrongMemoize
self.table_name = 'postgres_indexes'
diff --git a/lib/gitlab/database/postgres_index_bloat_estimate.rb b/lib/gitlab/database/postgres_index_bloat_estimate.rb
index 379227bf87c..5c9b5777b74 100644
--- a/lib/gitlab/database/postgres_index_bloat_estimate.rb
+++ b/lib/gitlab/database/postgres_index_bloat_estimate.rb
@@ -6,7 +6,7 @@ module Gitlab
# for all indexes can be expensive in a large database.
#
# Best used on a per-index basis.
- class PostgresIndexBloatEstimate < ActiveRecord::Base
+ class PostgresIndexBloatEstimate < SharedModel
self.table_name = 'postgres_index_bloat_estimates'
self.primary_key = 'identifier'
diff --git a/lib/gitlab/database/reindexing.rb b/lib/gitlab/database/reindexing.rb
index 04b409a9306..6e2bad05b71 100644
--- a/lib/gitlab/database/reindexing.rb
+++ b/lib/gitlab/database/reindexing.rb
@@ -27,13 +27,14 @@ module Gitlab
Gitlab::AppLogger.info("Removing index #{index.identifier} which is a leftover, temporary index from previous reindexing activity")
retries = Gitlab::Database::WithLockRetriesOutsideTransaction.new(
+ connection: index.connection,
timing_configuration: REMOVE_INDEX_RETRY_CONFIG,
klass: self.class,
logger: Gitlab::AppLogger
)
retries.run(raise_on_exhaustion: false) do
- ApplicationRecord.connection.tap do |conn|
+ index.connection.tap do |conn|
conn.execute("DROP INDEX CONCURRENTLY IF EXISTS #{conn.quote_table_name(index.schema)}.#{conn.quote_table_name(index.name)}")
end
end
diff --git a/lib/gitlab/database/reindexing/reindex_action.rb b/lib/gitlab/database/reindexing/reindex_action.rb
index ff465fffb74..73424a76cfe 100644
--- a/lib/gitlab/database/reindexing/reindex_action.rb
+++ b/lib/gitlab/database/reindexing/reindex_action.rb
@@ -3,7 +3,7 @@
module Gitlab
module Database
module Reindexing
- class ReindexAction < ActiveRecord::Base
+ class ReindexAction < SharedModel
self.table_name = 'postgres_reindex_actions'
belongs_to :index, foreign_key: :index_identifier, class_name: 'Gitlab::Database::PostgresIndex'
diff --git a/lib/gitlab/database/reindexing/reindex_concurrently.rb b/lib/gitlab/database/reindexing/reindex_concurrently.rb
index 7a720f7c539..1a2e8bbcb2d 100644
--- a/lib/gitlab/database/reindexing/reindex_concurrently.rb
+++ b/lib/gitlab/database/reindexing/reindex_concurrently.rb
@@ -99,6 +99,7 @@ module Gitlab
logger.info("Removing dangling index #{index.identifier}")
retries = Gitlab::Database::WithLockRetriesOutsideTransaction.new(
+ connection: connection,
timing_configuration: REMOVE_INDEX_RETRY_CONFIG,
klass: self.class,
logger: logger
@@ -109,11 +110,6 @@ module Gitlab
end
end
- def with_lock_retries(&block)
- arguments = { klass: self.class, logger: logger }
- Gitlab::Database::WithLockRetries.new(**arguments).run(raise_on_exhaustion: true, &block)
- end
-
def set_statement_timeout
execute("SET statement_timeout TO '%ds'" % STATEMENT_TIMEOUT)
yield
@@ -123,7 +119,7 @@ module Gitlab
delegate :execute, :quote_table_name, to: :connection
def connection
- @connection ||= ActiveRecord::Base.connection
+ @connection ||= index.connection
end
end
end
diff --git a/lib/tasks/gitlab/db.rake b/lib/tasks/gitlab/db.rake
index e2647021914..4d8394a6a4d 100644
--- a/lib/tasks/gitlab/db.rake
+++ b/lib/tasks/gitlab/db.rake
@@ -161,31 +161,33 @@ namespace :gitlab do
end
desc 'reindex a regular index without downtime to eliminate bloat'
- task :reindex, [:index_name] => :environment do |_, args|
+ task :reindex, [:index_name, :database] => :environment do |_, args|
unless Feature.enabled?(:database_reindexing, type: :ops)
puts "This feature (database_reindexing) is currently disabled.".color(:yellow)
exit
end
- indexes = Gitlab::Database::PostgresIndex.reindexing_support
+ Gitlab::Database::EachDatabase.each_database_connection do |connection, connection_name|
+ indexes = Gitlab::Database::PostgresIndex.reindexing_support
- if identifier = args[:index_name]
- raise ArgumentError, "Index name is not fully qualified with a schema: #{identifier}" unless identifier =~ /^\w+\.\w+$/
+ if (identifier = args[:index_name]) && (args.fetch(:database, 'main') == connection_name)
+ raise ArgumentError, "Index name is not fully qualified with a schema: #{identifier}" unless identifier =~ /^\w+\.\w+$/
- indexes = indexes.where(identifier: identifier)
+ indexes = indexes.where(identifier: identifier)
- raise "Index not found or not supported: #{args[:index_name]}" if indexes.empty?
- end
+ raise "Index #{args[:index_name]} for #{connection_name} database not found or not supported" if indexes.empty?
+ end
- ActiveRecord::Base.logger = Logger.new($stdout) if Gitlab::Utils.to_boolean(ENV['LOG_QUERIES_TO_CONSOLE'], default: false)
+ Gitlab::Database::SharedModel.logger = Logger.new($stdout) if Gitlab::Utils.to_boolean(ENV['LOG_QUERIES_TO_CONSOLE'], default: false)
- # Cleanup leftover temporary indexes from previous, possibly aborted runs (if any)
- Gitlab::Database::Reindexing.cleanup_leftovers!
+ # Cleanup leftover temporary indexes from previous, possibly aborted runs (if any)
+ Gitlab::Database::Reindexing.cleanup_leftovers!
- # Hack: Before we do actual reindexing work, create async indexes
- Gitlab::Database::AsyncIndexes.create_pending_indexes! if Feature.enabled?(:database_async_index_creation, type: :ops)
+ # Hack: Before we do actual reindexing work, create async indexes
+ Gitlab::Database::AsyncIndexes.create_pending_indexes! if Feature.enabled?(:database_async_index_creation, type: :ops)
- Gitlab::Database::Reindexing.perform(indexes)
+ Gitlab::Database::Reindexing.perform(indexes)
+ end
rescue StandardError => e
Gitlab::AppLogger.error(e)
raise
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 3c6c84f763f..01e5000db10 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -40896,6 +40896,12 @@ msgstr ""
msgid "mrWidget|Approved by"
msgstr ""
+msgid "mrWidget|Approved by you"
+msgstr ""
+
+msgid "mrWidget|Approved by you and others"
+msgstr ""
+
msgid "mrWidget|Are you adding technical debt or code vulnerabilities?"
msgstr ""
@@ -40993,9 +40999,6 @@ msgstr ""
msgid "mrWidget|Merge locally"
msgstr ""
-msgid "mrWidget|Merge request approved."
-msgstr ""
-
msgid "mrWidget|Merged by"
msgstr ""
diff --git a/qa/Gemfile.lock b/qa/Gemfile.lock
index c3b92c78f03..48511b59e64 100644
--- a/qa/Gemfile.lock
+++ b/qa/Gemfile.lock
@@ -41,7 +41,7 @@ GEM
capybara-screenshot (1.0.23)
capybara (>= 1.0, < 4)
launchy
- chemlab (0.9.1)
+ chemlab (0.9.2)
colorize (~> 0.8)
i18n (~> 1.8)
rake (>= 12, < 14)
diff --git a/qa/lib/gitlab/page/admin/subscription.rb b/qa/lib/gitlab/page/admin/subscription.rb
index 0f7c6b4c211..cdd9bb20b42 100644
--- a/qa/lib/gitlab/page/admin/subscription.rb
+++ b/qa/lib/gitlab/page/admin/subscription.rb
@@ -6,7 +6,16 @@ module Gitlab
class Subscription < Chemlab::Page
path '/admin/subscription'
+ p :plan
+ p :started
+ p :name
+ p :company
+ p :email
+ h2 :billable_users
+ h2 :maximum_users
h2 :users_in_subscription
+ h2 :users_over_subscription
+ table :subscription_history
end
end
end
diff --git a/qa/lib/gitlab/page/admin/subscription.stub.rb b/qa/lib/gitlab/page/admin/subscription.stub.rb
index 51f23e7f0d0..89d7bfb95d9 100644
--- a/qa/lib/gitlab/page/admin/subscription.stub.rb
+++ b/qa/lib/gitlab/page/admin/subscription.stub.rb
@@ -4,6 +4,174 @@ module Gitlab
module Page
module Admin
module Subscription
+ # @note Defined as +p :plan+
+ # @return [String] The text content or value of +plan+
+ def plan
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription.plan_element).to exist
+ # end
+ # @return [Watir::P] The raw +P+ element
+ def plan_element
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription).to be_plan
+ # end
+ # @return [Boolean] true if the +plan+ element is present on the page
+ def plan?
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @note Defined as +p :started+
+ # @return [String] The text content or value of +started+
+ def started
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription.started_element).to exist
+ # end
+ # @return [Watir::P] The raw +P+ element
+ def started_element
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription).to be_started
+ # end
+ # @return [Boolean] true if the +started+ element is present on the page
+ def started?
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @note Defined as +p :name+
+ # @return [String] The text content or value of +name+
+ def name
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription.name_element).to exist
+ # end
+ # @return [Watir::P] The raw +P+ element
+ def name_element
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription).to be_name
+ # end
+ # @return [Boolean] true if the +name+ element is present on the page
+ def name?
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @note Defined as +p :company+
+ # @return [String] The text content or value of +company+
+ def company
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription.company_element).to exist
+ # end
+ # @return [Watir::P] The raw +P+ element
+ def company_element
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription).to be_company
+ # end
+ # @return [Boolean] true if the +company+ element is present on the page
+ def company?
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @note Defined as +p :email+
+ # @return [String] The text content or value of +email+
+ def email
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription.email_element).to exist
+ # end
+ # @return [Watir::P] The raw +P+ element
+ def email_element
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription).to be_email
+ # end
+ # @return [Boolean] true if the +email+ element is present on the page
+ def email?
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @note Defined as +h2 :billable_users+
+ # @return [String] The text content or value of +billable_users+
+ def billable_users
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription.billable_users_element).to exist
+ # end
+ # @return [Watir::H2] The raw +H2+ element
+ def billable_users_element
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription).to be_billable_users
+ # end
+ # @return [Boolean] true if the +billable_users+ element is present on the page
+ def billable_users?
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @note Defined as +h2 :maximum_users+
+ # @return [String] The text content or value of +maximum_users+
+ def maximum_users
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription.maximum_users_element).to exist
+ # end
+ # @return [Watir::H2] The raw +H2+ element
+ def maximum_users_element
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription).to be_maximum_users
+ # end
+ # @return [Boolean] true if the +maximum_users+ element is present on the page
+ def maximum_users?
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
# @note Defined as +h2 :users_in_subscription+
# @return [String] The text content or value of +users_in_subscription+
def users_in_subscription
@@ -27,6 +195,54 @@ module Gitlab
def users_in_subscription?
# This is a stub, used for indexing. The method is dynamically generated.
end
+
+ # @note Defined as +h2 :users_over_subscription+
+ # @return [String] The text content or value of +users_over_subscription+
+ def users_over_subscription
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription.users_over_subscription_element).to exist
+ # end
+ # @return [Watir::H2] The raw +H2+ element
+ def users_over_subscription_element
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription).to be_users_over_subscription
+ # end
+ # @return [Boolean] true if the +users_over_subscription+ element is present on the page
+ def users_over_subscription?
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @note Defined as +table :subscription_history+
+ # @return [String] The text content or value of +subscription_history+
+ def subscription_history
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription.subscription_history_element).to exist
+ # end
+ # @return [Watir::Table] The raw +Table+ element
+ def subscription_history_element
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
+
+ # @example
+ # Gitlab::Page::Admin::Subscription.perform do |subscription|
+ # expect(subscription).to be_subscription_history
+ # end
+ # @return [Boolean] true if the +subscription_history+ element is present on the page
+ def subscription_history?
+ # This is a stub, used for indexing. The method is dynamically generated.
+ end
end
end
end
diff --git a/qa/qa/support/helpers/plan.rb b/qa/qa/support/helpers/plan.rb
new file mode 100644
index 00000000000..298a6d3f036
--- /dev/null
+++ b/qa/qa/support/helpers/plan.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+module QA
+ module Support
+ module Helpers
+ module Plan
+ FREE = { name: 'free', price: 0, yearly_price: 0, ci_minutes: 400 }.freeze
+
+ PREMIUM = {
+ plan_id: '2c92a00d76f0d5060176f2fb0a5029ff',
+ rate_charge_id: '2c92a00d76f0d5060176f2fb0a672a02',
+ name: 'premium',
+ price: 19,
+ yearly_price: 228,
+ ci_minutes: 10000
+ }.freeze
+
+ PREMIUM_SELF_MANAGED = {
+ plan_id: '2c92a01176f0d50a0176f3043c4d4a53',
+ rate_charge_id: '2c92a01176f0d50a0176f3043c6a4a58',
+ name: 'premium',
+ price: 19,
+ yearly_price: 228
+ }.freeze
+
+ ULTIMATE = {
+ plan_id: '2c92a0ff76f0d5250176f2f8c86f305a',
+ rate_charge_id: '2c92a0ff76f0d5250176f2f8c896305c',
+ name: 'ultimate',
+ price: 99,
+ yearly_price: 1188,
+ ci_minutes: 50000
+ }.freeze
+
+ ULTIMATE_SELF_MANAGED = {
+ plan_id: '2c92a00c76f0c6c20176f2f9328b33c9',
+ rate_charge_id: '2c92a00c76f0c6c20176f2fcbb645b5f',
+ name: 'ultimate',
+ price: 99,
+ yearly_price: 1188
+ }.freeze
+
+ CI_MINUTES = {
+ plan_id: '2c92a0086a07f4a8016a2c0a1f7b4b4c',
+ rate_charge_id: '2c92a0fd6a07f4c6016a2c0af07c3f21',
+ name: 'ci_minutes',
+ price: 10,
+ ci_minutes: 1000
+ }.freeze
+
+ STORAGE = {
+ plan_id: '2c92a00f7279a6f5017279d299d01cf9',
+ rate_charge_id: '2c92a0ff7279a74f017279d5bea71fc5',
+ name: 'storage',
+ price: 60,
+ storage: 10
+ }.freeze
+
+ LICENSE_TYPE = {
+ license_file: 'license file',
+ cloud_license: 'cloud license'
+ }.freeze
+ end
+ end
+ end
+end
diff --git a/spec/features/merge_request/user_approves_spec.rb b/spec/features/merge_request/user_approves_spec.rb
index f401dd598f3..4f7bcb58551 100644
--- a/spec/features/merge_request/user_approves_spec.rb
+++ b/spec/features/merge_request/user_approves_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe 'Merge request > User approves', :js do
it 'approves merge request' do
click_approval_button('Approve')
- expect(page).to have_content('Merge request approved')
+ expect(page).to have_content('Approved by you')
verify_approvals_count_on_index!
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index 59db537282b..2cb8b0b679e 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -1,5 +1,5 @@
import { GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount, mount, createLocalVue } from '@vue/test-utils';
+import { mount, shallowMount, createLocalVue } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
@@ -19,6 +19,14 @@ import TextViewer from '~/repository/components/blob_viewers/text_viewer.vue';
import blobInfoQuery from '~/repository/queries/blob_info.query.graphql';
import { redirectTo } from '~/lib/utils/url_utility';
import { isLoggedIn } from '~/lib/utils/common_utils';
+import {
+ simpleViewerMock,
+ richViewerMock,
+ projectMock,
+ userPermissionsMock,
+ propsMock,
+ refMock,
+} from '../mock_data';
jest.mock('~/repository/components/blob_viewers');
jest.mock('~/lib/utils/url_utility');
@@ -27,147 +35,56 @@ jest.mock('~/lib/utils/common_utils');
let wrapper;
let mockResolver;
-const simpleMockData = {
- name: 'some_file.js',
- size: 123,
- rawSize: 123,
- rawTextBlob: 'raw content',
- type: 'text',
- fileType: 'text',
- tooLarge: false,
- path: 'some_file.js',
- webPath: 'some_file.js',
- editBlobPath: 'some_file.js/edit',
- ideEditPath: 'some_file.js/ide/edit',
- forkAndEditPath: 'some_file.js/fork/edit',
- ideForkAndEditPath: 'some_file.js/fork/ide',
- canModifyBlob: true,
- storedExternally: false,
- rawPath: 'some_file.js',
- externalStorageUrl: 'some_file.js',
- replacePath: 'some_file.js/replace',
- deletePath: 'some_file.js/delete',
- simpleViewer: {
- fileType: 'text',
- tooLarge: false,
- type: 'simple',
- renderError: null,
- },
- richViewer: null,
-};
-const richMockData = {
- ...simpleMockData,
- richViewer: {
- fileType: 'markup',
- tooLarge: false,
- type: 'rich',
- renderError: null,
- },
-};
-
-const projectMockData = {
- userPermissions: {
- pushCode: true,
- downloadCode: true,
- createMergeRequestIn: true,
- forkProject: true,
- },
- repository: {
- empty: false,
- },
-};
-
const localVue = createLocalVue();
const mockAxios = new MockAdapter(axios);
-const createComponentWithApollo = (mockData = {}, inject = {}) => {
+const createComponent = async (mockData = {}, mountFn = shallowMount) => {
localVue.use(VueApollo);
- const defaultPushCode = projectMockData.userPermissions.pushCode;
- const defaultDownloadCode = projectMockData.userPermissions.downloadCode;
- const defaultEmptyRepo = projectMockData.repository.empty;
const {
- blobs,
- emptyRepo = defaultEmptyRepo,
- canPushCode = defaultPushCode,
- canDownloadCode = defaultDownloadCode,
- createMergeRequestIn = projectMockData.userPermissions.createMergeRequestIn,
- forkProject = projectMockData.userPermissions.forkProject,
- pathLocks = [],
+ blob = simpleViewerMock,
+ empty = projectMock.repository.empty,
+ pushCode = userPermissionsMock.pushCode,
+ forkProject = userPermissionsMock.forkProject,
+ downloadCode = userPermissionsMock.downloadCode,
+ createMergeRequestIn = userPermissionsMock.createMergeRequestIn,
+ isBinary,
+ inject = {},
} = mockData;
- mockResolver = jest.fn().mockResolvedValue({
- data: {
- project: {
- id: '1234',
- userPermissions: {
- pushCode: canPushCode,
- downloadCode: canDownloadCode,
- createMergeRequestIn,
- forkProject,
- },
- pathLocks: {
- nodes: pathLocks,
- },
- repository: {
- empty: emptyRepo,
- blobs: {
- nodes: [blobs],
- },
- },
- },
+ const project = {
+ ...projectMock,
+ userPermissions: {
+ pushCode,
+ forkProject,
+ downloadCode,
+ createMergeRequestIn,
+ },
+ repository: {
+ empty,
+ blobs: { nodes: [blob] },
},
+ };
+
+ mockResolver = jest.fn().mockResolvedValue({
+ data: { isBinary, project },
});
const fakeApollo = createMockApollo([[blobInfoQuery, mockResolver]]);
- wrapper = shallowMount(BlobContentViewer, {
+ wrapper = mountFn(BlobContentViewer, {
localVue,
apolloProvider: fakeApollo,
- propsData: {
- path: 'some_file.js',
- projectPath: 'some/path',
- },
- mixins: [
- {
- data: () => ({ ref: 'default-ref' }),
- },
- ],
- provide: {
- ...inject,
- },
+ propsData: propsMock,
+ mixins: [{ data: () => ({ ref: refMock }) }],
+ provide: { ...inject },
});
-};
-const createFactory = (mountFn) => (
- { props = {}, mockData = {}, stubs = {} } = {},
- loading = false,
-) => {
- wrapper = mountFn(BlobContentViewer, {
- propsData: {
- path: 'some_file.js',
- projectPath: 'some/path',
- ...props,
- },
- mocks: {
- $apollo: {
- queries: {
- project: {
- loading,
- refetch: jest.fn(),
- },
- },
- },
- },
- stubs,
- });
+ wrapper.setData({ project, isBinary });
- wrapper.setData(mockData);
+ await waitForPromises();
};
-const factory = createFactory(shallowMount);
-const fullFactory = createFactory(mount);
-
describe('Blob content viewer component', () => {
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findBlobHeader = () => wrapper.findComponent(BlobHeader);
@@ -187,25 +104,24 @@ describe('Blob content viewer component', () => {
});
it('renders a GlLoadingIcon component', () => {
- factory({ mockData: { blobInfo: simpleMockData } }, true);
+ createComponent();
expect(findLoadingIcon().exists()).toBe(true);
});
describe('simple viewer', () => {
- beforeEach(() => {
- factory({ mockData: { blobInfo: simpleMockData } });
- });
+ it('renders a BlobHeader component', async () => {
+ await createComponent();
- it('renders a BlobHeader component', () => {
expect(findBlobHeader().props('activeViewerType')).toEqual('simple');
expect(findBlobHeader().props('hasRenderError')).toEqual(false);
expect(findBlobHeader().props('hideViewerSwitcher')).toEqual(true);
- expect(findBlobHeader().props('blob')).toEqual(simpleMockData);
+ expect(findBlobHeader().props('blob')).toEqual(simpleViewerMock);
});
- it('renders a BlobContent component', () => {
- expect(findBlobContent().props('loading')).toEqual(false);
+ it('renders a BlobContent component', async () => {
+ await createComponent();
+
expect(findBlobContent().props('isRawContent')).toBe(true);
expect(findBlobContent().props('activeViewer')).toEqual({
fileType: 'text',
@@ -217,8 +133,7 @@ describe('Blob content viewer component', () => {
describe('legacy viewers', () => {
it('loads a legacy viewer when a viewer component is not available', async () => {
- createComponentWithApollo({ blobs: { ...simpleMockData, fileType: 'unknown' } });
- await waitForPromises();
+ await createComponent({ blob: { ...simpleViewerMock, fileType: 'unknown' } });
expect(mockAxios.history.get).toHaveLength(1);
expect(mockAxios.history.get[0].url).toEqual('some_file.js?format=json&viewer=simple');
@@ -227,21 +142,18 @@ describe('Blob content viewer component', () => {
});
describe('rich viewer', () => {
- beforeEach(() => {
- factory({
- mockData: { blobInfo: richMockData, activeViewerType: 'rich' },
- });
- });
+ it('renders a BlobHeader component', async () => {
+ await createComponent({ blob: richViewerMock });
- it('renders a BlobHeader component', () => {
expect(findBlobHeader().props('activeViewerType')).toEqual('rich');
expect(findBlobHeader().props('hasRenderError')).toEqual(false);
expect(findBlobHeader().props('hideViewerSwitcher')).toEqual(false);
- expect(findBlobHeader().props('blob')).toEqual(richMockData);
+ expect(findBlobHeader().props('blob')).toEqual(richViewerMock);
});
- it('renders a BlobContent component', () => {
- expect(findBlobContent().props('loading')).toEqual(false);
+ it('renders a BlobContent component', async () => {
+ await createComponent({ blob: richViewerMock });
+
expect(findBlobContent().props('isRawContent')).toBe(true);
expect(findBlobContent().props('activeViewer')).toEqual({
fileType: 'markup',
@@ -252,6 +164,8 @@ describe('Blob content viewer component', () => {
});
it('updates viewer type when viewer changed is clicked', async () => {
+ await createComponent({ blob: richViewerMock });
+
expect(findBlobContent().props('activeViewer')).toEqual(
expect.objectContaining({
type: 'rich',
@@ -273,8 +187,7 @@ describe('Blob content viewer component', () => {
describe('legacy viewers', () => {
it('loads a legacy viewer when a viewer component is not available', async () => {
- createComponentWithApollo({ blobs: { ...richMockData, fileType: 'unknown' } });
- await waitForPromises();
+ await createComponent({ blob: { ...richViewerMock, fileType: 'unknown' } });
expect(mockAxios.history.get).toHaveLength(1);
expect(mockAxios.history.get[0].url).toEqual('some_file.js?format=json&viewer=rich');
@@ -287,9 +200,9 @@ describe('Blob content viewer component', () => {
viewerProps.mockRestore();
});
- it('does not render a BlobContent component if a Blob viewer is available', () => {
- loadViewer.mockReturnValueOnce(() => true);
- factory({ mockData: { blobInfo: richMockData } });
+ it('does not render a BlobContent component if a Blob viewer is available', async () => {
+ loadViewer.mockReturnValue(() => true);
+ await createComponent({ blob: richViewerMock });
expect(findBlobContent().exists()).toBe(false);
});
@@ -305,15 +218,13 @@ describe('Blob content viewer component', () => {
loadViewer.mockReturnValue(loadViewerReturnValue);
viewerProps.mockReturnValue(viewerPropsReturnValue);
- factory({
- mockData: {
- blobInfo: {
- ...simpleMockData,
- fileType: null,
- simpleViewer: {
- ...simpleMockData.simpleViewer,
- fileType: viewer,
- },
+ createComponent({
+ blob: {
+ ...simpleViewerMock,
+ fileType: 'null',
+ simpleViewer: {
+ ...simpleViewerMock.simpleViewer,
+ fileType: viewer,
},
},
});
@@ -327,18 +238,10 @@ describe('Blob content viewer component', () => {
});
describe('BlobHeader action slot', () => {
- const { ideEditPath, editBlobPath } = simpleMockData;
+ const { ideEditPath, editBlobPath } = simpleViewerMock;
it('renders BlobHeaderEdit buttons in simple viewer', async () => {
- fullFactory({
- mockData: { blobInfo: simpleMockData },
- stubs: {
- BlobContent: true,
- BlobReplace: true,
- },
- });
-
- await nextTick();
+ await createComponent({ inject: { BlobContent: true, BlobReplace: true } }, mount);
expect(findBlobEdit().props()).toMatchObject({
editPath: editBlobPath,
@@ -348,15 +251,7 @@ describe('Blob content viewer component', () => {
});
it('renders BlobHeaderEdit button in rich viewer', async () => {
- fullFactory({
- mockData: { blobInfo: richMockData },
- stubs: {
- BlobContent: true,
- BlobReplace: true,
- },
- });
-
- await nextTick();
+ await createComponent({ blob: richViewerMock }, mount);
expect(findBlobEdit().props()).toMatchObject({
editPath: editBlobPath,
@@ -366,15 +261,7 @@ describe('Blob content viewer component', () => {
});
it('renders BlobHeaderEdit button for binary files', async () => {
- fullFactory({
- mockData: { blobInfo: richMockData, isBinary: true },
- stubs: {
- BlobContent: true,
- BlobReplace: true,
- },
- });
-
- await nextTick();
+ await createComponent({ blob: richViewerMock, isBinary: true }, mount);
expect(findBlobEdit().props()).toMatchObject({
editPath: editBlobPath,
@@ -384,41 +271,26 @@ describe('Blob content viewer component', () => {
});
describe('blob header binary file', () => {
- it.each([richMockData, { simpleViewer: { fileType: 'download' } }])(
- 'passes the correct isBinary value when viewing a binary file',
- async (blobInfo) => {
- fullFactory({
- mockData: {
- blobInfo,
- isBinary: true,
- },
- stubs: { BlobContent: true, BlobReplace: true },
- });
-
- await nextTick();
+ it('passes the correct isBinary value when viewing a binary file', async () => {
+ await createComponent({ blob: richViewerMock, isBinary: true });
- expect(findBlobHeader().props('isBinary')).toBe(true);
- },
- );
+ expect(findBlobHeader().props('isBinary')).toBe(true);
+ });
it('passes the correct header props when viewing a non-text file', async () => {
- fullFactory({
- mockData: {
- blobInfo: {
- ...simpleMockData,
+ await createComponent(
+ {
+ blob: {
+ ...simpleViewerMock,
simpleViewer: {
- ...simpleMockData.simpleViewer,
+ ...simpleViewerMock.simpleViewer,
fileType: 'image',
},
},
+ isBinary: true,
},
- stubs: {
- BlobContent: true,
- BlobReplace: true,
- },
- });
-
- await nextTick();
+ mount,
+ );
expect(findBlobHeader().props('hideViewerSwitcher')).toBe(true);
expect(findBlobHeader().props('isBinary')).toBe(true);
@@ -427,27 +299,16 @@ describe('Blob content viewer component', () => {
});
describe('BlobButtonGroup', () => {
- const { name, path, replacePath, webPath } = simpleMockData;
+ const { name, path, replacePath, webPath } = simpleViewerMock;
const {
userPermissions: { pushCode, downloadCode },
repository: { empty },
- } = projectMockData;
+ } = projectMock;
it('renders component', async () => {
window.gon.current_user_id = 1;
- fullFactory({
- mockData: {
- blobInfo: simpleMockData,
- project: { userPermissions: { pushCode, downloadCode }, repository: { empty } },
- },
- stubs: {
- BlobContent: true,
- BlobButtonGroup: true,
- },
- });
-
- await nextTick();
+ await createComponent({ pushCode, downloadCode, empty }, mount);
expect(findBlobButtonGroup().props()).toMatchObject({
name,
@@ -467,21 +328,14 @@ describe('Blob content viewer component', () => {
${false} | ${true} | ${false}
${true} | ${false} | ${false}
`('passes the correct lock states', async ({ canPushCode, canDownloadCode, canLock }) => {
- fullFactory({
- mockData: {
- blobInfo: simpleMockData,
- project: {
- userPermissions: { pushCode: canPushCode, downloadCode: canDownloadCode },
- repository: { empty },
- },
+ await createComponent(
+ {
+ pushCode: canPushCode,
+ downloadCode: canDownloadCode,
+ empty,
},
- stubs: {
- BlobContent: true,
- BlobButtonGroup: true,
- },
- });
-
- await nextTick();
+ mount,
+ );
expect(findBlobButtonGroup().props('canLock')).toBe(canLock);
});
@@ -489,15 +343,7 @@ describe('Blob content viewer component', () => {
it('does not render if not logged in', async () => {
isLoggedIn.mockReturnValueOnce(false);
- fullFactory({
- mockData: { blobInfo: simpleMockData },
- stubs: {
- BlobContent: true,
- BlobReplace: true,
- },
- });
-
- await nextTick();
+ await createComponent();
expect(findBlobButtonGroup().exists()).toBe(false);
});
@@ -506,10 +352,7 @@ describe('Blob content viewer component', () => {
describe('blob info query', () => {
it('is called with originalBranch value if the prop has a value', async () => {
- const inject = { originalBranch: 'some-branch' };
- createComponentWithApollo({ blobs: simpleMockData }, inject);
-
- await waitForPromises();
+ await createComponent({ inject: { originalBranch: 'some-branch' } });
expect(mockResolver).toHaveBeenCalledWith(
expect.objectContaining({
@@ -519,10 +362,7 @@ describe('Blob content viewer component', () => {
});
it('is called with ref value if the originalBranch prop has no value', async () => {
- const inject = { originalBranch: null };
- createComponentWithApollo({ blobs: simpleMockData }, inject);
-
- await waitForPromises();
+ await createComponent();
expect(mockResolver).toHaveBeenCalledWith(
expect.objectContaining({
@@ -533,24 +373,16 @@ describe('Blob content viewer component', () => {
});
describe('edit blob', () => {
- beforeEach(() => {
- fullFactory({
- mockData: { blobInfo: simpleMockData },
- stubs: {
- BlobContent: true,
- BlobReplace: true,
- },
- });
- });
+ beforeEach(() => createComponent({}, mount));
it('simple edit redirects to the simple editor', () => {
findBlobEdit().vm.$emit('edit', 'simple');
- expect(redirectTo).toHaveBeenCalledWith(simpleMockData.editBlobPath);
+ expect(redirectTo).toHaveBeenCalledWith(simpleViewerMock.editBlobPath);
});
it('IDE edit redirects to the IDE editor', () => {
findBlobEdit().vm.$emit('edit', 'ide');
- expect(redirectTo).toHaveBeenCalledWith(simpleMockData.ideEditPath);
+ expect(redirectTo).toHaveBeenCalledWith(simpleViewerMock.ideEditPath);
});
it.each`
@@ -569,16 +401,14 @@ describe('Blob content viewer component', () => {
showForkSuggestion,
}) => {
isLoggedIn.mockReturnValueOnce(loggedIn);
- fullFactory({
- mockData: {
- blobInfo: { ...simpleMockData, canModifyBlob },
- project: { userPermissions: { createMergeRequestIn, forkProject } },
+ await createComponent(
+ {
+ blob: { ...simpleViewerMock, canModifyBlob },
+ createMergeRequestIn,
+ forkProject,
},
- stubs: {
- BlobContent: true,
- BlobButtonGroup: true,
- },
- });
+ mount,
+ );
findBlobEdit().vm.$emit('edit', 'simple');
await nextTick();
diff --git a/spec/frontend/repository/mock_data.js b/spec/frontend/repository/mock_data.js
new file mode 100644
index 00000000000..a3c60032c8c
--- /dev/null
+++ b/spec/frontend/repository/mock_data.js
@@ -0,0 +1,56 @@
+export const simpleViewerMock = {
+ name: 'some_file.js',
+ size: 123,
+ rawSize: 123,
+ rawTextBlob: 'raw content',
+ fileType: 'text',
+ path: 'some_file.js',
+ webPath: 'some_file.js',
+ editBlobPath: 'some_file.js/edit',
+ ideEditPath: 'some_file.js/ide/edit',
+ forkAndEditPath: 'some_file.js/fork/edit',
+ ideForkAndEditPath: 'some_file.js/fork/ide',
+ canModifyBlob: true,
+ storedExternally: false,
+ rawPath: 'some_file.js',
+ replacePath: 'some_file.js/replace',
+ simpleViewer: {
+ fileType: 'text',
+ tooLarge: false,
+ type: 'simple',
+ renderError: null,
+ },
+ richViewer: null,
+};
+
+export const richViewerMock = {
+ ...simpleViewerMock,
+ richViewer: {
+ fileType: 'markup',
+ tooLarge: false,
+ type: 'rich',
+ renderError: null,
+ },
+};
+
+export const userPermissionsMock = {
+ pushCode: true,
+ forkProject: true,
+ downloadCode: true,
+ createMergeRequestIn: true,
+};
+
+export const projectMock = {
+ id: '1234',
+ userPermissions: userPermissionsMock,
+ pathLocks: {
+ nodes: [],
+ },
+ repository: {
+ empty: false,
+ },
+};
+
+export const propsMock = { path: 'some_file.js', projectPath: 'some/path' };
+
+export const refMock = 'default-ref';
diff --git a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js b/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js
index c9dea4394f9..c2606346292 100644
--- a/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js
+++ b/spec/frontend/vue_mr_widget/components/approvals/approvals_summary_spec.js
@@ -1,14 +1,20 @@
import { shallowMount } from '@vue/test-utils';
import { toNounSeriesText } from '~/lib/utils/grammar';
import ApprovalsSummary from '~/vue_merge_request_widget/components/approvals/approvals_summary.vue';
-import { APPROVED_MESSAGE } from '~/vue_merge_request_widget/components/approvals/messages';
+import {
+ APPROVED_BY_OTHERS,
+ APPROVED_BY_YOU,
+ APPROVED_BY_YOU_AND_OTHERS,
+} from '~/vue_merge_request_widget/components/approvals/messages';
import UserAvatarList from '~/vue_shared/components/user_avatar/user_avatar_list.vue';
+const exampleUserId = 1;
const testApprovers = () => Array.from({ length: 5 }, (_, i) => i).map((id) => ({ id }));
const testRulesLeft = () => ['Lorem', 'Ipsum', 'dolar & sit'];
const TEST_APPROVALS_LEFT = 3;
describe('MRWidget approvals summary', () => {
+ const originalUserId = gon.current_user_id;
let wrapper;
const createComponent = (props = {}) => {
@@ -28,6 +34,7 @@ describe('MRWidget approvals summary', () => {
afterEach(() => {
wrapper.destroy();
wrapper = null;
+ gon.current_user_id = originalUserId;
});
describe('when approved', () => {
@@ -38,7 +45,7 @@ describe('MRWidget approvals summary', () => {
});
it('shows approved message', () => {
- expect(wrapper.text()).toContain(APPROVED_MESSAGE);
+ expect(wrapper.text()).toContain(APPROVED_BY_OTHERS);
});
it('renders avatar list for approvers', () => {
@@ -51,6 +58,48 @@ describe('MRWidget approvals summary', () => {
}),
);
});
+
+ describe('by the current user', () => {
+ beforeEach(() => {
+ gon.current_user_id = exampleUserId;
+ createComponent({
+ approvers: [{ id: exampleUserId }],
+ approved: true,
+ });
+ });
+
+ it('shows "Approved by you" message', () => {
+ expect(wrapper.text()).toContain(APPROVED_BY_YOU);
+ });
+ });
+
+ describe('by the current user and others', () => {
+ beforeEach(() => {
+ gon.current_user_id = exampleUserId;
+ createComponent({
+ approvers: [{ id: exampleUserId }, { id: exampleUserId + 1 }],
+ approved: true,
+ });
+ });
+
+ it('shows "Approved by you and others" message', () => {
+ expect(wrapper.text()).toContain(APPROVED_BY_YOU_AND_OTHERS);
+ });
+ });
+
+ describe('by users other than the current user', () => {
+ beforeEach(() => {
+ gon.current_user_id = exampleUserId;
+ createComponent({
+ approvers: [{ id: exampleUserId + 1 }],
+ approved: true,
+ });
+ });
+
+ it('shows "Approved by others" message', () => {
+ expect(wrapper.text()).toContain(APPROVED_BY_OTHERS);
+ });
+ });
});
describe('when not approved', () => {
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 3aa6b2e3c05..b549f367fdb 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:previous_stages) { [] }
let(:current_stage) { double(seeds_names: [attributes[:name]]) }
- let(:seed_build) { described_class.new(seed_context, attributes, previous_stages, current_stage) }
+ let(:seed_build) { described_class.new(seed_context, attributes, previous_stages + [current_stage]) }
describe '#attributes' do
subject { seed_build.attributes }
@@ -393,12 +393,14 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
describe '#to_resource' do
subject { seed_build.to_resource }
- context 'when job is not a bridge' do
+ context 'when job is Ci::Build' do
it { is_expected.to be_a(::Ci::Build) }
it { is_expected.to be_valid }
shared_examples_for 'deployment job' do
it 'returns a job with deployment' do
+ expect { subject }.to change { Environment.count }.by(1)
+
expect(subject.deployment).not_to be_nil
expect(subject.deployment.deployable).to eq(subject)
expect(subject.deployment.environment.name).to eq(expected_environment_name)
@@ -413,6 +415,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
shared_examples_for 'ensures environment existence' do
it 'has environment' do
+ expect { subject }.to change { Environment.count }.by(1)
+
expect(subject).to be_has_environment
expect(subject.environment).to eq(environment_name)
expect(subject.metadata.expanded_environment_name).to eq(expected_environment_name)
@@ -422,6 +426,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
shared_examples_for 'ensures environment inexistence' do
it 'does not have environment' do
+ expect { subject }.not_to change { Environment.count }
+
expect(subject).not_to be_has_environment
expect(subject.environment).to be_nil
expect(subject.metadata&.expanded_environment_name).to be_nil
diff --git a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
index 434cba4edde..223730f87c0 100644
--- a/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/postgres_async_index_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::AsyncIndexes::PostgresAsyncIndex, type: :model do
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
describe 'validations' do
let(:identifier_limit) { described_class::MAX_IDENTIFIER_LENGTH }
let(:definition_limit) { described_class::MAX_DEFINITION_LENGTH }
diff --git a/spec/lib/gitlab/database/postgres_index_bloat_estimate_spec.rb b/spec/lib/gitlab/database/postgres_index_bloat_estimate_spec.rb
index da4422bd442..13ac9190ab7 100644
--- a/spec/lib/gitlab/database/postgres_index_bloat_estimate_spec.rb
+++ b/spec/lib/gitlab/database/postgres_index_bloat_estimate_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe Gitlab::Database::PostgresIndexBloatEstimate do
let(:identifier) { 'public.schema_migrations_pkey' }
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
describe '#bloat_size' do
it 'returns the bloat size in bytes' do
# We cannot assert much more about the bloat size estimate here
diff --git a/spec/lib/gitlab/database/postgres_index_spec.rb b/spec/lib/gitlab/database/postgres_index_spec.rb
index 9088719d5a4..db66736676b 100644
--- a/spec/lib/gitlab/database/postgres_index_spec.rb
+++ b/spec/lib/gitlab/database/postgres_index_spec.rb
@@ -22,6 +22,8 @@ RSpec.describe Gitlab::Database::PostgresIndex do
it_behaves_like 'a postgres model'
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
describe '.reindexing_support' do
it 'only non partitioned indexes' do
expect(described_class.reindexing_support).to all(have_attributes(partitioned: false))
diff --git a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
index a8f196d8f0e..1b409924acc 100644
--- a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
@@ -11,6 +11,8 @@ RSpec.describe Gitlab::Database::Reindexing::ReindexAction do
swapout_view_for_table(:postgres_indexes)
end
+ it { is_expected.to be_a Gitlab::Database::SharedModel }
+
describe '.create_for' do
subject { described_class.create_for(index) }
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index 15c88c9f657..1bf6c7a2c82 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -323,6 +323,53 @@ RSpec.describe Ci::RetryBuildService do
it 'persists expanded environment name' do
expect(new_build.metadata.expanded_environment_name).to eq('production')
end
+
+ it 'does not create a new environment' do
+ expect { new_build }.not_to change { Environment.count }
+ end
+ end
+
+ context 'when build with dynamic environment is retried' do
+ let_it_be(:other_developer) { create(:user).tap { |u| project.add_developer(u) } }
+
+ let(:environment_name) { 'review/$CI_COMMIT_REF_SLUG-$GITLAB_USER_ID' }
+
+ let!(:build) do
+ create(:ci_build, :with_deployment, environment: environment_name,
+ options: { environment: { name: environment_name } },
+ pipeline: pipeline, stage_id: stage.id, project: project,
+ user: other_developer)
+ end
+
+ it 're-uses the previous persisted environment' do
+ expect(build.persisted_environment.name).to eq("review/#{build.ref}-#{other_developer.id}")
+
+ expect(new_build.persisted_environment.name).to eq("review/#{build.ref}-#{other_developer.id}")
+ end
+
+ it 'creates a new deployment' do
+ expect { new_build }.to change { Deployment.count }.by(1)
+ end
+
+ it 'does not create a new environment' do
+ expect { new_build }.not_to change { Environment.count }
+ end
+
+ context 'when sticky_environments_in_job_retry feature flag is disabled' do
+ before do
+ stub_feature_flags(sticky_environments_in_job_retry: false)
+ end
+
+ it 'creates a new environment' do
+ expect { new_build }.to change { Environment.count }
+ end
+
+ it 'ignores the previous persisted environment' do
+ expect(build.persisted_environment.name).to eq("review/#{build.ref}-#{other_developer.id}")
+
+ expect(new_build.persisted_environment.name).to eq("review/#{build.ref}-#{developer.id}")
+ end
+ end
end
context 'when build has needs' do
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index ad4ada9a9f1..e485ba6f5d8 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -252,10 +252,26 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
run_rake_task('gitlab:db:reindex', '[public.foo_idx]')
end
+ context 'when database name is provided' do
+ it 'calls the index rebuilder with the proper arguments when the database name matches' do
+ allow(indexes).to receive(:where).with(identifier: 'public.foo_idx').and_return([index])
+ expect(Gitlab::Database::Reindexing).to receive(:perform).with([index])
+
+ run_rake_task('gitlab:db:reindex', '[public.foo_idx,main]')
+ end
+
+ it 'ignores the index and uses all candidate indexes if the database name does not match' do
+ expect(Gitlab::Database::PostgresIndex).to receive(:reindexing_support).and_return(indexes)
+ expect(Gitlab::Database::Reindexing).to receive(:perform).with(indexes)
+
+ run_rake_task('gitlab:db:reindex', '[public.foo_idx,ci]')
+ end
+ end
+
it 'raises an error if the index does not exist' do
allow(indexes).to receive(:where).with(identifier: 'public.absent_index').and_return([])
- expect { run_rake_task('gitlab:db:reindex', '[public.absent_index]') }.to raise_error(/Index not found/)
+ expect { run_rake_task('gitlab:db:reindex', '[public.absent_index]') }.to raise_error(/Index public.absent_index for main database not found or not supported/)
end
it 'raises an error if the index is not fully qualified with a schema' do