gitlab.com/gitlab-org/gitlab-foss.git
author    GitLab Bot <gitlab-bot@gitlab.com>  2023-12-15 09:13:20 +0300
committer GitLab Bot <gitlab-bot@gitlab.com>  2023-12-15 09:13:20 +0300
commit    5e9a1717166c6b9bf0a6cbd00137d5c0043ba442 (patch)
tree      30e54c2e3c41ad4caf51b0a60b46549d3feb3a7c
parent    00880613328c66f85aee755fcd1e70ce4cb9fcdf (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  .rubocop_todo/rspec/named_subject.yml | 1
-rw-r--r--  .rubocop_todo/style/inline_disable_annotation.yml | 1
-rw-r--r--  Gemfile | 5
-rw-r--r--  Gemfile.checksum | 2
-rw-r--r--  Gemfile.lock | 4
-rw-r--r--  app/assets/javascripts/issuable/components/locked_badge.vue | 2
-rw-r--r--  app/assets/javascripts/vue_shared/components/notes/noteable_warning.vue | 2
-rw-r--r--  app/views/profiles/keys/_form.html.haml | 13
-rw-r--r--  app/workers/process_commit_worker.rb | 2
-rw-r--r--  doc/administration/audit_event_streaming/index.md | 79
-rw-r--r--  gems/gitlab-backup-cli/lib/gitlab/backup/cli.rb | 1
-rw-r--r--  gems/gitlab-backup-cli/lib/gitlab/backup/cli/utils.rb | 11
-rw-r--r--  gems/gitlab-backup-cli/lib/gitlab/backup/cli/utils/pg_dump.rb | 59
-rw-r--r--  gems/gitlab-backup-cli/sig/gitlab/backup/cli.rbs | 1
-rw-r--r--  gems/gitlab-backup-cli/sig/gitlab/backup/cli/utils/pg_dump.rbs | 25
-rw-r--r--  gems/gitlab-backup-cli/spec/gitlab/backup/cli/utils/pg_dump_spec.rb | 81
-rw-r--r--  lib/backup/database.rb | 92
-rw-r--r--  lib/backup/database_configuration.rb | 107
-rw-r--r--  lib/backup/database_connection.rb | 59
-rw-r--r--  lib/backup/database_model.rb | 86
-rw-r--r--  lib/backup/dump/postgres.rb | 13
-rw-r--r--  lib/gitlab/gitaly_client/operation_service.rb | 24
-rw-r--r--  locale/gitlab.pot | 5
-rw-r--r--  spec/features/issues/discussion_lock_spec.rb | 254
-rw-r--r--  spec/lib/backup/database_configuration_spec.rb | 239
-rw-r--r--  spec/lib/backup/database_connection_spec.rb | 103
-rw-r--r--  spec/lib/backup/database_model_spec.rb | 186
-rw-r--r--  spec/lib/backup/database_spec.rb | 99
-rw-r--r--  spec/lib/backup/dump/postgres_spec.rb | 68
-rw-r--r--  spec/models/repository_spec.rb | 94
-rw-r--r--  spec/views/profiles/keys/_form.html.haml_spec.rb | 4
-rw-r--r--  spec/workers/process_commit_worker_spec.rb | 254
32 files changed, 1293 insertions(+), 683 deletions(-)
diff --git a/.rubocop_todo/rspec/named_subject.yml b/.rubocop_todo/rspec/named_subject.yml
index 69a4316468e..e7222c70973 100644
--- a/.rubocop_todo/rspec/named_subject.yml
+++ b/.rubocop_todo/rspec/named_subject.yml
@@ -1694,7 +1694,6 @@ RSpec/NamedSubject:
- 'spec/lib/atlassian/jira_connect/serializers/repository_entity_spec.rb'
- 'spec/lib/atlassian/jira_connect/serializers/reviewer_entity_spec.rb'
- 'spec/lib/backup/database_backup_error_spec.rb'
- - 'spec/lib/backup/database_model_spec.rb'
- 'spec/lib/backup/database_spec.rb'
- 'spec/lib/backup/dump/postgres_spec.rb'
- 'spec/lib/backup/files_spec.rb'
diff --git a/.rubocop_todo/style/inline_disable_annotation.yml b/.rubocop_todo/style/inline_disable_annotation.yml
index 0161399c12d..801854440c8 100644
--- a/.rubocop_todo/style/inline_disable_annotation.yml
+++ b/.rubocop_todo/style/inline_disable_annotation.yml
@@ -2866,7 +2866,6 @@ Style/InlineDisableAnnotation:
- 'spec/lib/api/base_spec.rb'
- 'spec/lib/api/entities/wiki_page_spec.rb'
- 'spec/lib/api/helpers/packages/npm_spec.rb'
- - 'spec/lib/backup/database_model_spec.rb'
- 'spec/lib/backup/database_spec.rb'
- 'spec/lib/backup/manager_spec.rb'
- 'spec/lib/banzai/filter/footnote_filter_spec.rb'
diff --git a/Gemfile b/Gemfile
index 60e6f73dc74..1a37f05946b 100644
--- a/Gemfile
+++ b/Gemfile
@@ -38,9 +38,10 @@ gem 'gitlab-safe_request_store', path: 'gems/gitlab-safe_request_store' # ruboco
# GitLab Monorepo Gems
group :monorepo do
gem 'gitlab-utils', path: 'gems/gitlab-utils' # rubocop:todo Gemfile/MissingFeatureCategory
- gem 'gitlab-backup-cli', path: 'gems/gitlab-backup-cli', feature_category: :backup_restore
end
+gem 'gitlab-backup-cli', path: 'gems/gitlab-backup-cli', require: 'gitlab/backup/cli', feature_category: :backup_restore
+
gem 'gitlab-secret_detection', path: 'gems/gitlab-secret_detection', feature_category: :secret_detection
# Responders respond_to and respond_with
@@ -548,7 +549,7 @@ gem 'ssh_data', '~> 1.3' # rubocop:todo Gemfile/MissingFeatureCategory
gem 'spamcheck', '~> 1.3.0' # rubocop:todo Gemfile/MissingFeatureCategory
# Gitaly GRPC protocol definitions
-gem 'gitaly', '~> 16.5.0.pre.rc1' # rubocop:todo Gemfile/MissingFeatureCategory
+gem 'gitaly', '~> 16.7.0-rc1', feature_category: :gitaly
# KAS GRPC protocol definitions
gem 'kas-grpc', '~> 0.3.0', feature_category: :deployment_management
diff --git a/Gemfile.checksum b/Gemfile.checksum
index 6b4cf264ef5..a4a2ac559e7 100644
--- a/Gemfile.checksum
+++ b/Gemfile.checksum
@@ -206,7 +206,7 @@
{"name":"gettext","version":"3.4.9","platform":"ruby","checksum":"292864fe6a15c224cee4125a4a72fab426fdbb280e4cff3cfe44935f549b009a"},
{"name":"gettext_i18n_rails","version":"1.11.0","platform":"ruby","checksum":"e19c7e4a256c500f7f38396dca44a282b9838ae278f57c362993a54964b22bbe"},
{"name":"git","version":"1.18.0","platform":"ruby","checksum":"c9b80462e4565cd3d7a9ba8440c41d2c52244b17b0dad0bfddb46de70630c465"},
-{"name":"gitaly","version":"16.5.0.pre.rc1","platform":"ruby","checksum":"ed17515ad04d4663a0efc15c8f2887b705f006133e8b10cc9321460eb0a38353"},
+{"name":"gitaly","version":"16.7.0.pre.rc1","platform":"ruby","checksum":"cd2a2fbbde6bfb51fd09c4b0fded247150cc4bea36a0b44008921d77810311f7"},
{"name":"gitlab","version":"4.19.0","platform":"ruby","checksum":"3f645e3e195dbc24f0834fbf83e8ccfb2056d8e9712b01a640aad418a6949679"},
{"name":"gitlab-chronic","version":"0.10.5","platform":"ruby","checksum":"f80f18dc699b708870a80685243331290bc10cfeedb6b99c92219722f729c875"},
{"name":"gitlab-dangerfiles","version":"4.6.0","platform":"ruby","checksum":"441b37b17d1dad36268517490a30aaf57e43dffb2e9ebc1da38d3bc9fa20741e"},
diff --git a/Gemfile.lock b/Gemfile.lock
index 19fb6256704..9c8bfff022f 100644
--- a/Gemfile.lock
+++ b/Gemfile.lock
@@ -666,7 +666,7 @@ GEM
git (1.18.0)
addressable (~> 2.8)
rchardet (~> 1.8)
- gitaly (16.5.0.pre.rc1)
+ gitaly (16.7.0.pre.rc1)
grpc (~> 1.0)
gitlab (4.19.0)
httparty (~> 0.20)
@@ -1871,7 +1871,7 @@ DEPENDENCIES
fuubar (~> 2.2.0)
gettext (~> 3.3)
gettext_i18n_rails (~> 1.11.0)
- gitaly (~> 16.5.0.pre.rc1)
+ gitaly (~> 16.7.0.pre.rc1)
gitlab-backup-cli!
gitlab-chronic (~> 0.10.5)
gitlab-dangerfiles (~> 4.6.0)
diff --git a/app/assets/javascripts/issuable/components/locked_badge.vue b/app/assets/javascripts/issuable/components/locked_badge.vue
index 652d02e8f9d..1bd399dc257 100644
--- a/app/assets/javascripts/issuable/components/locked_badge.vue
+++ b/app/assets/javascripts/issuable/components/locked_badge.vue
@@ -32,7 +32,7 @@ export default {
</script>
<template>
- <gl-badge v-gl-tooltip :title="title" variant="warning">
+ <gl-badge v-gl-tooltip :title="title" variant="warning" data-testid="locked-badge">
<gl-icon name="lock" />
<span class="gl-sr-only">{{ __('Locked') }}</span>
</gl-badge>
diff --git a/app/assets/javascripts/vue_shared/components/notes/noteable_warning.vue b/app/assets/javascripts/vue_shared/components/notes/noteable_warning.vue
index 3bee539688b..1ee752e8c19 100644
--- a/app/assets/javascripts/vue_shared/components/notes/noteable_warning.vue
+++ b/app/assets/javascripts/vue_shared/components/notes/noteable_warning.vue
@@ -72,7 +72,7 @@ export default {
};
</script>
<template>
- <div class="issuable-note-warning">
+ <div class="issuable-note-warning" data-testid="issuable-note-warning">
<gl-icon v-if="!isLockedAndConfidential" :name="warningIcon" :size="16" class="icon inline" />
<span v-if="isLockedAndConfidential" ref="lockedAndConfidential">
diff --git a/app/views/profiles/keys/_form.html.haml b/app/views/profiles/keys/_form.html.haml
index edb52f31ecc..b79013b33e1 100644
--- a/app/views/profiles/keys/_form.html.haml
+++ b/app/views/profiles/keys/_form.html.haml
@@ -25,12 +25,13 @@
%p.form-text.text-muted= ssh_key_expires_field_description
.js-add-ssh-key-validation-warning.hide
- .bs-callout.bs-callout-warning.gl-mt-0{ role: 'alert', aria_live: 'assertive' }
- %strong= _('Oops, are you sure?')
- %p= s_("Profiles|Publicly visible private SSH keys can compromise your system.")
- = render Pajamas::ButtonComponent.new(variant: :confirm,
- button_options: { class: 'js-add-ssh-key-validation-confirm-submit' }) do
- = _("Yes, add it")
+ = render Pajamas::AlertComponent.new(title: _('Are you sure?'), variant: :warning, dismissible: false) do |c|
+ - c.with_body do
+ = s_("Profiles|Publicly visible private SSH keys can compromise your system.")
+ - c.with_actions do
+ = render Pajamas::ButtonComponent.new(variant: :confirm,
+ button_options: { class: 'js-add-ssh-key-validation-confirm-submit' }) do
+ = _("Yes, add it")
= f.submit s_('Profiles|Add key'), class: "js-add-ssh-key-validation-original-submit", pajamas_button: true, data: { testid: 'add-key-button' }
= render Pajamas::ButtonComponent.new(button_options: { type: 'reset', class: 'js-add-ssh-key-validation-cancel gl-ml-2 js-toggle-button' }) do
diff --git a/app/workers/process_commit_worker.rb b/app/workers/process_commit_worker.rb
index cc72704d8c9..30e394a95cf 100644
--- a/app/workers/process_commit_worker.rb
+++ b/app/workers/process_commit_worker.rb
@@ -42,6 +42,8 @@ class ProcessCommitWorker
update_issue_metrics(commit, author)
end
+ private
+
def process_commit_message(project, commit, user, author, default = false)
# Ignore closing references from GitLab-generated commit messages.
find_closing_issues = default && !commit.merged_merge_request?(user)
diff --git a/doc/administration/audit_event_streaming/index.md b/doc/administration/audit_event_streaming/index.md
index 3d4250013ce..71ae33b3d87 100644
--- a/doc/administration/audit_event_streaming/index.md
+++ b/doc/administration/audit_event_streaming/index.md
@@ -587,6 +587,85 @@ To delete Google Cloud Logging streaming destinations to an instance:
1. Select **Delete destination**.
1. Confirm by selecting **Delete destination** in the dialog.
+### AWS S3 destinations
+
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/138245) in GitLab 16.7 [with a flag](../feature_flags.md) named `allow_streaming_instance_audit_events_to_amazon_s3`. Disabled by default.
+
+FLAG:
+On self-managed GitLab, by default this feature is not available. To enable the feature, an administrator can [enable the feature flag](../feature_flags.md) named
+`allow_streaming_instance_audit_events_to_amazon_s3`. On GitLab.com, this feature is not available.
+
+Manage AWS S3 destinations for the entire instance.
+
+#### Prerequisites
+
+Before setting up AWS S3 streaming audit events, you must:
+
+1. Create an access key for AWS with the appropriate credentials and permissions. This account is used to configure audit log streaming authentication.
+ For more information, see [Managing access keys](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_credentials_access-keys.html?icmpid=docs_iam_console#Using_CreateAccessKey).
+1. Create an AWS S3 bucket. This bucket is used to store audit log streaming data. For more information, see [Creating a bucket](https://docs.aws.amazon.com/AmazonS3/latest/userguide/create-bucket-overview.html).
+
+#### Add a new AWS S3 destination
+
+Prerequisites:
+
+- Administrator access on the instance.
+
+To add AWS S3 streaming destinations to an instance:
+
+1. On the left sidebar, at the bottom, select **Admin Area**.
+1. Select **Monitoring > Audit Events**.
+1. On the main area, select the **Streams** tab.
+1. Select **Add streaming destination** and select **AWS S3** to show the section for adding destinations.
+1. Enter a random string to use as a name for the new destination.
+1. Enter the Access Key ID, Secret Access Key, Bucket Name, and AWS Region from the previously created AWS access key and bucket to add to the new destination.
+1. Select **Add** to add the new streaming destination.
+
+#### List AWS S3 destinations
+
+Prerequisites:
+
+- Administrator access on the instance.
+
+To list AWS S3 streaming destinations for an instance:
+
+1. On the left sidebar, at the bottom, select **Admin Area**.
+1. Select **Monitoring > Audit Events**.
+1. On the main area, select the **Streams** tab.
+1. Select the AWS S3 stream to expand and see all the fields.
+
+#### Update an AWS S3 destination
+
+Prerequisites:
+
+- Administrator access on the instance.
+
+To update AWS S3 streaming destinations to an instance:
+
+1. On the left sidebar, at the bottom, select **Admin Area**.
+1. Select **Monitoring > Audit Events**.
+1. On the main area, select the **Streams** tab.
+1. Select the AWS S3 stream to expand.
+1. Enter a random string to use as a name for the destination.
+1. Enter the Access Key ID, Secret Access Key, Bucket Name, and AWS Region from the previously created AWS access key and bucket to update the destination.
+1. Select **Add a new Secret Access Key** and enter an AWS Secret Access Key to update the Secret Access Key.
+1. Select **Save** to update the streaming destination.
+
+#### Delete an AWS S3 streaming destination
+
+Prerequisites:
+
+- Administrator access on the instance.
+
+To delete AWS S3 streaming destinations on an instance:
+
+1. On the left sidebar, at the bottom, select **Admin Area**.
+1. Select **Monitoring > Audit Events**.
+1. On the main area, select the **Streams** tab.
+1. Select the AWS S3 stream to expand.
+1. Select **Delete destination**.
+1. Confirm by selecting **Delete destination** in the dialog.
+
## Payload schema
> Documentation for an audit event streaming schema was [introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/358149) in GitLab 15.3.
diff --git a/gems/gitlab-backup-cli/lib/gitlab/backup/cli.rb b/gems/gitlab-backup-cli/lib/gitlab/backup/cli.rb
index 01ea934863f..4c46dff868c 100644
--- a/gems/gitlab-backup-cli/lib/gitlab/backup/cli.rb
+++ b/gems/gitlab-backup-cli/lib/gitlab/backup/cli.rb
@@ -6,6 +6,7 @@ module Gitlab
module Cli
autoload :VERSION, 'gitlab/backup/cli/version'
autoload :Runner, 'gitlab/backup/cli/runner'
+ autoload :Utils, 'gitlab/backup/cli/utils'
Error = Class.new(StandardError)
# Your code goes here...
diff --git a/gems/gitlab-backup-cli/lib/gitlab/backup/cli/utils.rb b/gems/gitlab-backup-cli/lib/gitlab/backup/cli/utils.rb
new file mode 100644
index 00000000000..84e6c605172
--- /dev/null
+++ b/gems/gitlab-backup-cli/lib/gitlab/backup/cli/utils.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Backup
+ module Cli
+ module Utils
+ autoload :PgDump, 'gitlab/backup/cli/utils/pg_dump'
+ end
+ end
+ end
+end
diff --git a/gems/gitlab-backup-cli/lib/gitlab/backup/cli/utils/pg_dump.rb b/gems/gitlab-backup-cli/lib/gitlab/backup/cli/utils/pg_dump.rb
new file mode 100644
index 00000000000..6c16ca5cf06
--- /dev/null
+++ b/gems/gitlab-backup-cli/lib/gitlab/backup/cli/utils/pg_dump.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module Backup
+ module Cli
+ module Utils
+ class PgDump
+ # Expose snapshot_id to be used when creating a database dump
+ # See https://www.postgresql.org/docs/14/functions-admin.html#FUNCTIONS-SNAPSHOT-SYNCHRONIZATION
+ attr_reader :snapshot_id
+ # Dump only specified database schemas instead of everything
+ attr_reader :schemas
+ # Database name
+ attr_reader :database_name
+ # Additional ENV variables to use when running PgDump
+ attr_reader :env
+
+ # @param [String] database_name
+ # @param [String] snapshot_id the snapshot id to use when creating a database dump
+ # @param [Array<String>] schemas
+ # @param [Hash<String,String>] env
+ def initialize(database_name:, snapshot_id: nil, schemas: [], env: {})
+ @database_name = database_name
+ @snapshot_id = snapshot_id
+ @schemas = schemas
+ @env = env
+ end
+
+ # Spawn a pg_dump process and assign a given output IO
+ #
+ # @param [IO] output the output IO
+ def spawn(output:)
+ Process.spawn(env, 'pg_dump', *cmd_args, out: output)
+ end
+
+ private
+
+ # Returns a list of arguments used by the pg_dump command
+ #
+ # @return [Array<String (frozen)>]
+ def cmd_args
+ args = ["--clean"] # Pass '--clean' to include 'DROP TABLE' statements in the DB dump.
+ args << '--if-exists'
+ args << "--snapshot=#{snapshot_id}" if snapshot_id
+
+ schemas.each do |schema|
+ args << '-n'
+ args << schema
+ end
+
+ args << database_name
+
+ args
+ end
+ end
+ end
+ end
+ end
+end
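A minimal usage sketch of the new `PgDump` helper, assuming placeholder values for the database name, snapshot id, and connection settings, and assuming `pg_dump` is available on `PATH`:

```ruby
require 'gitlab/backup/cli'

# Placeholder values, for illustration only.
pg_dump = Gitlab::Backup::Cli::Utils::PgDump.new(
  database_name: 'gitlabhq_production',
  snapshot_id: '00000003-000001BF-1',            # from pg_export_snapshot(), when a snapshot is in use
  schemas: %w[public gitlab_partitions_dynamic], # dump only these schemas (-n flags)
  env: { 'PGHOST' => 'localhost', 'PGPORT' => '5432' }
)

File.open('gitlabhq_production.sql', 'w') do |file|
  # Runs: pg_dump --clean --if-exists --snapshot=... -n public -n gitlab_partitions_dynamic gitlabhq_production
  pid = pg_dump.spawn(output: file)
  Process.waitpid(pid)
end
```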
diff --git a/gems/gitlab-backup-cli/sig/gitlab/backup/cli.rbs b/gems/gitlab-backup-cli/sig/gitlab/backup/cli.rbs
index 25540c06400..76b68239e30 100644
--- a/gems/gitlab-backup-cli/sig/gitlab/backup/cli.rbs
+++ b/gems/gitlab-backup-cli/sig/gitlab/backup/cli.rbs
@@ -1,6 +1,7 @@
module Gitlab
module Backup
module Cli
+ Error: StandardError
VERSION: String
end
end
diff --git a/gems/gitlab-backup-cli/sig/gitlab/backup/cli/utils/pg_dump.rbs b/gems/gitlab-backup-cli/sig/gitlab/backup/cli/utils/pg_dump.rbs
new file mode 100644
index 00000000000..1718d0df6c0
--- /dev/null
+++ b/gems/gitlab-backup-cli/sig/gitlab/backup/cli/utils/pg_dump.rbs
@@ -0,0 +1,25 @@
+module Gitlab
+ module Backup
+ module Cli
+ module Utils
+ class PgDump
+ attr_reader snapshot_id: String
+ attr_reader schemas: Array[String]
+ attr_reader database_name: String
+ attr_reader env: Hash[String,String]
+
+ def initialize: (database_name: String, ?snapshot_id: String?, ?schemas: Array[String], ?env: Hash[String, String]) -> void
+
+ # Spawn a pg_dump process and assign a given output IO
+ #
+ # @param [IO] output the output IO
+ def spawn: (output: IO) -> Integer
+
+ private
+
+ def cmd_args: () -> Array[String]
+ end
+ end
+ end
+ end
+end
diff --git a/gems/gitlab-backup-cli/spec/gitlab/backup/cli/utils/pg_dump_spec.rb b/gems/gitlab-backup-cli/spec/gitlab/backup/cli/utils/pg_dump_spec.rb
new file mode 100644
index 00000000000..b1e8495c637
--- /dev/null
+++ b/gems/gitlab-backup-cli/spec/gitlab/backup/cli/utils/pg_dump_spec.rb
@@ -0,0 +1,81 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Backup::Cli::Utils::PgDump do
+ let(:cmd_args) { pg_dump.send(:cmd_args) }
+ let(:database_name) { 'gitlab_database' }
+ let(:env) do
+ {
+ 'PGHOST' => '192.168.99.99',
+ 'PGPORT' => '5434'
+ }
+ end
+
+ subject(:pg_dump) { described_class.new(database_name: database_name) }
+
+ context 'with accessors' do
+ it { respond_to :database_name }
+ it { respond_to :snapshot_id }
+ it { respond_to :schemas }
+ it { respond_to :env }
+ end
+
+ describe '#cmd_args' do
+ let(:default_args) { %w[--clean --if-exists] }
+
+ context 'when no optional parameter is provided' do
+ it 'returns default arguments' do
+ expect(cmd_args).to eq(default_args << database_name)
+ end
+ end
+
+ context 'with custom snapshot_id' do
+ let(:snapshot_id) { '00000003-000001BF-1' }
+
+ subject(:pg_dump) { described_class.new(database_name: database_name, snapshot_id: snapshot_id) }
+
+ it 'adds a flag between default_args and the database name' do
+ expect(cmd_args).to eq(default_args + %W[--snapshot=#{snapshot_id} #{database_name}])
+ end
+ end
+
+ context 'with custom schemas' do
+ let(:schemas) { %w[public gitlab_partitions_dynamic gitlab_partitions_static] }
+
+ subject(:pg_dump) { described_class.new(database_name: database_name, schemas: schemas) }
+
+ it 'adds additional flags for each schema' do
+ schemas_args = %W[-n #{schemas[0]} -n #{schemas[1]} -n #{schemas[2]}]
+ expected_args = (default_args + schemas_args) << database_name
+
+ expect(cmd_args).to eq(expected_args)
+ end
+ end
+ end
+
+ describe '#spawn' do
+ it 'returns a spawned process' do
+ process = instance_double(Process)
+ expect(Process).to receive(:spawn).and_return(process)
+
+ expect(pg_dump.spawn(output: StringIO)).to eq(process)
+ end
+
+ it 'forwards cmd_args to Process spawn' do
+ expect(Process).to receive(:spawn).with({}, 'pg_dump', *cmd_args, any_args)
+
+ pg_dump.spawn(output: StringIO)
+ end
+
+ context 'when env variables are provided' do
+ subject(:pg_dump) { described_class.new(database_name: database_name, env: env) }
+
+ it 'forwards provided env variables to Process spawn' do
+ expect(Process).to receive(:spawn).with(env, 'pg_dump', any_args)
+
+ pg_dump.spawn(output: StringIO)
+ end
+ end
+ end
+end
diff --git a/lib/backup/database.rb b/lib/backup/database.rb
index 27b672ff6b6..a0eaccb1ca4 100644
--- a/lib/backup/database.rb
+++ b/lib/backup/database.rb
@@ -24,44 +24,37 @@ module Backup
end
override :dump
- def dump(destination_dir, backup_id)
+ def dump(destination_dir, _)
FileUtils.mkdir_p(destination_dir)
- each_database(destination_dir) do |database_name, current_db|
- model = current_db[:model]
- snapshot_id = current_db[:snapshot_id]
+ each_database(destination_dir) do |backup_connection|
+ pg_env = backup_connection.database_configuration.pg_env_variables
+ active_record_config = backup_connection.database_configuration.activerecord_variables
+ pg_database_name = active_record_config[:database]
- pg_env = model.config[:pg_env]
- connection = model.connection
- active_record_config = model.config[:activerecord]
- pg_database = active_record_config[:database]
+ dump_file_name = file_name(destination_dir, backup_connection.connection_name)
+ FileUtils.rm_f(dump_file_name)
- db_file_name = file_name(destination_dir, database_name)
- FileUtils.rm_f(db_file_name)
-
- progress.print "Dumping PostgreSQL database #{pg_database} ... "
+ progress.print "Dumping PostgreSQL database #{pg_database_name} ... "
- pgsql_args = ["--clean"] # Pass '--clean' to include 'DROP TABLE' statements in the DB dump.
- pgsql_args << '--if-exists'
- pgsql_args << "--snapshot=#{snapshot_id}" if snapshot_id
+ schemas = []
if Gitlab.config.backup.pg_schema
- pgsql_args << '-n'
- pgsql_args << Gitlab.config.backup.pg_schema
-
- Gitlab::Database::EXTRA_SCHEMAS.each do |schema|
- pgsql_args << '-n'
- pgsql_args << schema.to_s
- end
+ schemas << Gitlab.config.backup.pg_schema
+ schemas.push(*Gitlab::Database::EXTRA_SCHEMAS.map(&:to_s))
end
- success = with_transient_pg_env(pg_env) do
- Backup::Dump::Postgres.new.dump(pg_database, db_file_name, pgsql_args)
- end
+ pg_dump = ::Gitlab::Backup::Cli::Utils::PgDump.new(
+ database_name: pg_database_name,
+ snapshot_id: backup_connection.snapshot_id,
+ schemas: schemas,
+ env: pg_env)
+
+ success = Backup::Dump::Postgres.new.dump(dump_file_name, pg_dump)
- connection.rollback_transaction if snapshot_id
+ backup_connection.release_snapshot! if backup_connection.snapshot_id
- raise DatabaseBackupError.new(active_record_config, db_file_name) unless success
+ raise DatabaseBackupError.new(active_record_config, dump_file_name) unless success
report_success(success)
progress.flush
@@ -76,10 +69,10 @@ module Backup
override :restore
def restore(destination_dir, backup_id)
- base_models_for_backup.each do |database_name, _base_model|
- backup_model = Backup::DatabaseModel.new(database_name)
+ base_models_for_backup.each do |database_name, _|
+ backup_connection = Backup::DatabaseConnection.new(database_name)
- config = backup_model.config[:activerecord]
+ config = backup_connection.database_configuration.activerecord_variables
db_file_name = file_name(destination_dir, database_name)
database = config[:database]
@@ -100,7 +93,7 @@ module Backup
# hanging out from a failed upgrade
drop_tables(database_name)
- pg_env = backup_model.config[:pg_env]
+ pg_env = backup_connection.database_configuration.pg_env_variables
success = with_transient_pg_env(pg_env) do
decompress_rd, decompress_wr = IO.pipe
decompress_pid = spawn(decompress_cmd, out: decompress_wr, in: db_file_name)
@@ -235,6 +228,7 @@ module Backup
puts_time 'done'.color(:green)
end
+ # @deprecated This will be removed when restore operation is refactored to use extended_env directly
def with_transient_pg_env(extended_env)
ENV.merge!(extended_env)
result = yield
@@ -248,32 +242,36 @@ module Backup
end
def each_database(destination_dir, &block)
- databases = {}
+ databases = []
+
+ # each connection will loop through all database connections defined in `database.yml`
+ # and reject the ones that are shared, so we don't get duplicates
+ #
+ # we consider a connection to be shared when it has `database_tasks: false`
::Gitlab::Database::EachDatabase.each_connection(
only: base_models_for_backup.keys, include_shared: false
- ) do |_connection, name|
- next if databases[name]
-
- backup_model = Backup::DatabaseModel.new(name)
-
- databases[name] = {
- model: backup_model
- }
+ ) do |_, database_connection_name|
+ backup_connection = Backup::DatabaseConnection.new(database_connection_name)
+ databases << backup_connection
- next unless Gitlab::Database.database_mode == Gitlab::Database::MODE_MULTIPLE_DATABASES
-
- connection = backup_model.connection
+ next unless multiple_databases?
begin
- Gitlab::Database::TransactionTimeoutSettings.new(connection).disable_timeouts
- connection.begin_transaction(isolation: :repeatable_read)
- databases[name][:snapshot_id] = connection.select_value("SELECT pg_export_snapshot()")
+ # Trigger a transaction snapshot export that will be used by pg_dump later on
+ backup_connection.export_snapshot!
rescue ActiveRecord::ConnectionNotEstablished
- raise Backup::DatabaseBackupError.new(backup_model.config[:activerecord], file_name(destination_dir, name))
+ raise Backup::DatabaseBackupError.new(
+ backup_connection.database_configuration.activerecord_variables,
+ file_name(destination_dir, database_connection_name)
+ )
end
end
databases.each(&block)
end
+
+ def multiple_databases?
+ Gitlab::Database.database_mode == Gitlab::Database::MODE_MULTIPLE_DATABASES
+ end
end
end
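Taken together, the refactored per-database flow in `#dump` composes these pieces roughly as in the sketch below (the connection name and output path are assumed values; progress output and some error handling are elided):

```ruby
# Sketch of the flow driven by #each_database and #dump above.
backup_connection = Backup::DatabaseConnection.new('main')
backup_connection.export_snapshot! # repeatable-read transaction + pg_export_snapshot()

configuration = backup_connection.database_configuration

pg_dump = ::Gitlab::Backup::Cli::Utils::PgDump.new(
  database_name: configuration.activerecord_variables[:database],
  snapshot_id: backup_connection.snapshot_id,
  schemas: [], # filled from Gitlab.config.backup.pg_schema when configured
  env: configuration.pg_env_variables
)

success = Backup::Dump::Postgres.new.dump('database.sql.gz', pg_dump)
backup_connection.release_snapshot!

raise Backup::DatabaseBackupError.new(configuration.activerecord_variables, 'database.sql.gz') unless success
```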
diff --git a/lib/backup/database_configuration.rb b/lib/backup/database_configuration.rb
new file mode 100644
index 00000000000..1a6a476f9c1
--- /dev/null
+++ b/lib/backup/database_configuration.rb
@@ -0,0 +1,107 @@
+# frozen_string_literal: true
+
+module Backup
+ class DatabaseConfiguration
+ # Connection name is the key used in `config/database.yml` for multi-database connection configuration
+ #
+ # @return [String]
+ attr_reader :connection_name
+
+ # ActiveRecord base model that is configured to connect to the database identified by connection_name key
+ #
+ # @return [ActiveRecord::Base]
+ attr_reader :source_model
+
+ # Initializes configuration
+ #
+ # @param [String] connection_name the key from `database.yml` for multi-database connection configuration
+ def initialize(connection_name)
+ @connection_name = connection_name
+ @source_model = Gitlab::Database.database_base_models_with_gitlab_shared[connection_name] ||
+ Gitlab::Database.database_base_models_with_gitlab_shared['main']
+ @activerecord_database_config = ActiveRecord::Base.configurations.find_db_config(connection_name)
+ end
+
+ # ENV variables that can override each database configuration
+ # These are used along with OVERRIDE_PREFIX and database name
+ # @see #process_config_overrides!
+ SUPPORTED_OVERRIDES = {
+ username: 'PGUSER',
+ host: 'PGHOST',
+ port: 'PGPORT',
+ password: 'PGPASSWORD',
+ # SSL
+ sslmode: 'PGSSLMODE',
+ sslkey: 'PGSSLKEY',
+ sslcert: 'PGSSLCERT',
+ sslrootcert: 'PGSSLROOTCERT',
+ sslcrl: 'PGSSLCRL',
+ sslcompression: 'PGSSLCOMPRESSION'
+ }.freeze
+
+ # Prefixes used for ENV variables overriding database configuration
+ OVERRIDE_PREFIXES = %w[GITLAB_BACKUP_ GITLAB_OVERRIDE_].freeze
+
+ # Return the HashConfig for the database
+ #
+ # @return [ActiveRecord::DatabaseConfigurations::HashConfig]
+ def activerecord_configuration
+ ActiveRecord::DatabaseConfigurations::HashConfig.new(
+ @activerecord_database_config.env_name,
+ connection_name,
+ activerecord_variables
+ )
+ end
+
+ # Return postgres ENV variable values for the current database with overridden values
+ #
+ # @return[Hash<String,String>] hash of postgres ENV variables
+ def pg_env_variables
+ process_config_overrides! unless @pg_env_variables
+
+ @pg_env_variables
+ end
+
+ # Return activerecord configuration values for the current database with overridden values
+ #
+ # @return[Hash<String,String>] activerecord database.yml configuration compatible values
+ def activerecord_variables
+ process_config_overrides! unless @activerecord_variables
+
+ @activerecord_variables
+ end
+
+ private
+
+ def process_config_overrides!
+ @activerecord_variables = original_activerecord_config
+ @pg_env_variables = {}
+
+ SUPPORTED_OVERRIDES.each do |config_key, env_variable_name|
+ # This enables the use of different PostgreSQL settings in
+ # case PgBouncer is used. PgBouncer clears the search path,
+ # which wreaks havoc on Rails if connections are reused.
+ OVERRIDE_PREFIXES.each do |override_prefix|
+ override_all = "#{override_prefix}#{env_variable_name}"
+ override_db = "#{override_prefix}#{connection_name.upcase}_#{env_variable_name}"
+ val = ENV[override_db].presence ||
+ ENV[override_all].presence ||
+ @activerecord_variables[config_key].to_s.presence
+
+ next unless val
+
+ @pg_env_variables[env_variable_name] = val
+ @activerecord_variables[config_key] = val
+ end
+ end
+ end
+
+ # Return the database configuration from rails config/database.yml file
+ # in the format expected by ActiveRecord::DatabaseConfigurations::HashConfig
+ #
+ # @return [Hash] configuration hash
+ def original_activerecord_config
+ @activerecord_database_config.configuration_hash.dup
+ end
+ end
+end
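The override precedence implemented in `#process_config_overrides!` can be summarized with a small sketch (hostnames below are placeholders; a database-specific `GITLAB_BACKUP_MAIN_*` variable wins over the generic `GITLAB_BACKUP_*` one, which in turn wins over `database.yml`):

```ruby
# Placeholder hostnames, for illustration only.
ENV['GITLAB_BACKUP_PGHOST']      = 'pgbouncer.internal'   # generic override
ENV['GITLAB_BACKUP_MAIN_PGHOST'] = 'primary-db.internal'  # override specific to the 'main' connection

configuration = Backup::DatabaseConfiguration.new('main')

configuration.pg_env_variables['PGHOST']    # => "primary-db.internal"
configuration.activerecord_variables[:host] # => "primary-db.internal"
```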
diff --git a/lib/backup/database_connection.rb b/lib/backup/database_connection.rb
new file mode 100644
index 00000000000..f3f0a5dfcb5
--- /dev/null
+++ b/lib/backup/database_connection.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+module Backup
+ class DatabaseConnection
+ attr_reader :database_configuration, :snapshot_id
+
+ delegate :connection_name, to: :database_configuration
+ delegate :connection, to: :@backup_model
+
+ # Initializes a database connection
+ #
+ # @param [String] connection_name the key from `database.yml` for multi-database connection configuration
+ def initialize(connection_name)
+ @database_configuration = Backup::DatabaseConfiguration.new(connection_name)
+ @backup_model = backup_model
+ @snapshot_id = nil
+
+ configure_backup_model
+ end
+
+ # Start a new transaction and run pg_export_snapshot()
+ # Returns the snapshot identifier
+ #
+ # @return [String] snapshot identifier
+ def export_snapshot!
+ Gitlab::Database::TransactionTimeoutSettings.new(connection).disable_timeouts
+
+ connection.begin_transaction(isolation: :repeatable_read)
+ @snapshot_id = connection.select_value("SELECT pg_export_snapshot()")
+ end
+
+ # Rollback the transaction to release the effects of pg_export_snapshot()
+ def release_snapshot!
+ return unless snapshot_id
+
+ connection.rollback_transaction
+ @snapshot_id = nil
+ end
+
+ private
+
+ delegate :activerecord_configuration, to: :database_configuration, private: true
+
+ def configure_backup_model
+ @backup_model.establish_connection(activerecord_configuration)
+
+ Gitlab::Database::LoadBalancing::Setup.new(@backup_model).setup
+ end
+
+ # Creates a disposable model to be used to host the Backup connection only
+ def backup_model
+ klass_name = connection_name.camelize
+
+ return "#{self.class.name}::#{klass_name}".constantize if self.class.const_defined?(klass_name.to_sym, false)
+
+ self.class.const_set(klass_name, Class.new(ApplicationRecord))
+ end
+ end
+end
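The snapshot lifecycle exposed by `DatabaseConnection` then looks roughly like this (a sketch; `'main'` is an assumed connection name and the snapshot id shown is an example value):

```ruby
backup_connection = Backup::DatabaseConnection.new('main')

snapshot_id = backup_connection.export_snapshot!
# => e.g. "00000003-000001BF-1"; a repeatable-read transaction is now open, so
#    pg_dump can attach to the same consistent view via --snapshot=<id>.

# ... run pg_dump while the transaction stays open ...

backup_connection.release_snapshot! # rolls back the transaction
backup_connection.snapshot_id       # => nil
```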
diff --git a/lib/backup/database_model.rb b/lib/backup/database_model.rb
deleted file mode 100644
index 228a7fa5383..00000000000
--- a/lib/backup/database_model.rb
+++ /dev/null
@@ -1,86 +0,0 @@
-# frozen_string_literal: true
-
-module Backup
- class DatabaseModel
- SUPPORTED_OVERRIDES = {
- username: 'PGUSER',
- host: 'PGHOST',
- port: 'PGPORT',
- password: 'PGPASSWORD',
- # SSL
- sslmode: 'PGSSLMODE',
- sslkey: 'PGSSLKEY',
- sslcert: 'PGSSLCERT',
- sslrootcert: 'PGSSLROOTCERT',
- sslcrl: 'PGSSLCRL',
- sslcompression: 'PGSSLCOMPRESSION'
- }.freeze
-
- OVERRIDE_PREFIXES = %w[GITLAB_BACKUP_ GITLAB_OVERRIDE_].freeze
-
- attr_reader :config
-
- def initialize(name)
- configure_model(name)
- end
-
- def connection
- @model.connection
- end
-
- private
-
- def configure_model(name)
- source_model = Gitlab::Database.database_base_models_with_gitlab_shared[name] ||
- Gitlab::Database.database_base_models_with_gitlab_shared['main']
-
- @model = backup_model_for(name)
-
- original_config = source_model.connection_db_config.configuration_hash.dup
-
- @config = config_for_backup(name, original_config)
-
- @model.establish_connection(
- ActiveRecord::DatabaseConfigurations::HashConfig.new(
- source_model.connection_db_config.env_name,
- name.to_s,
- original_config.merge(@config[:activerecord])
- )
- )
-
- Gitlab::Database::LoadBalancing::Setup.new(@model).setup
- end
-
- def backup_model_for(name)
- klass_name = name.camelize
-
- return "#{self.class.name}::#{klass_name}".constantize if self.class.const_defined?(klass_name.to_sym, false)
-
- self.class.const_set(klass_name, Class.new(ApplicationRecord))
- end
-
- def config_for_backup(name, config)
- db_config = {
- activerecord: config,
- pg_env: {}
- }
- SUPPORTED_OVERRIDES.each do |opt, arg|
- # This enables the use of different PostgreSQL settings in
- # case PgBouncer is used. PgBouncer clears the search path,
- # which wreaks havoc on Rails if connections are reused.
- OVERRIDE_PREFIXES.each do |override_prefix|
- override_all = "#{override_prefix}#{arg}"
- override_db = "#{override_prefix}#{name.upcase}_#{arg}"
- val = ENV[override_db].presence || ENV[override_all].presence || config[opt].to_s.presence
-
- next unless val
-
- db_config[:pg_env][arg] = val
- db_config[:activerecord][opt] = val
- end
- end
-
- db_config
- end
- end
-end
diff --git a/lib/backup/dump/postgres.rb b/lib/backup/dump/postgres.rb
index 52690d242df..80a49971140 100644
--- a/lib/backup/dump/postgres.rb
+++ b/lib/backup/dump/postgres.rb
@@ -4,14 +4,21 @@ module Backup
class Postgres
include Backup::Helper
+ # Owner can read/write, group no permission, others no permission
FILE_PERMISSION = 0o600
- def dump(database_name, output_file, pgsql_args)
+ # Triggers PgDump and outputs to the provided file path
+ #
+ # @param [String] output_file_path full path to the output destination
+ # @param [Gitlab::Backup::Cli::Utils::PgDump] pg_dump
+ # @return [Boolean] whether pg_dump finished with success
+ def dump(output_file_path, pg_dump)
compress_rd, compress_wr = IO.pipe
- compress_pid = spawn(compress_cmd, in: compress_rd, out: [output_file, 'w', FILE_PERMISSION])
+
+ compress_pid = spawn(compress_cmd, in: compress_rd, out: [output_file_path, 'w', FILE_PERMISSION])
compress_rd.close
- dump_pid = Process.spawn('pg_dump', *pgsql_args, database_name, out: compress_wr)
+ dump_pid = pg_dump.spawn(output: compress_wr)
compress_wr.close
[compress_pid, dump_pid].all? do |pid|
diff --git a/lib/gitlab/gitaly_client/operation_service.rb b/lib/gitlab/gitaly_client/operation_service.rb
index 905588c2afc..882982b3cde 100644
--- a/lib/gitlab/gitaly_client/operation_service.rb
+++ b/lib/gitlab/gitaly_client/operation_service.rb
@@ -288,8 +288,6 @@ module Gitlab
def rebase(user, rebase_id, branch:, branch_sha:, remote_repository:, remote_branch:, push_options: [])
request_enum = QueueEnumerator.new
- rebase_sha = nil
-
response_enum = gitaly_client_call(
@repository.storage,
:operation_service,
@@ -316,16 +314,14 @@ module Gitlab
)
)
- perform_next_gitaly_rebase_request(response_enum) do |response|
- rebase_sha = response.rebase_sha
- end
+ response = response_enum.next
+ rebase_sha = response.rebase_sha
yield rebase_sha
# Second request confirms with gitaly to finalize the rebase
request_enum.push(Gitaly::UserRebaseConfirmableRequest.new(apply: true))
-
- perform_next_gitaly_rebase_request(response_enum)
+ response_enum.next
rebase_sha
rescue GRPC::BadStatus => e
@@ -528,20 +524,6 @@ module Gitlab
private
- def perform_next_gitaly_rebase_request(response_enum)
- response = response_enum.next
-
- if response.pre_receive_error.present?
- raise Gitlab::Git::PreReceiveError, response.pre_receive_error
- elsif response.git_error.present?
- raise Gitlab::Git::Repository::GitError, response.git_error
- end
-
- yield response if block_given?
-
- response
- end
-
def call_cherry_pick_or_revert(rpc, user:, commit:, branch_name:, message:, start_branch_name:, start_repository:, dry_run:)
request_class = "Gitaly::User#{rpc.to_s.camelcase}Request".constantize
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 487bac19c14..77f2418edab 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -6935,7 +6935,7 @@ msgstr ""
msgid "AuditStreams|AWS S3"
msgstr ""
-msgid "AuditStreams|Access Key Xid"
+msgid "AuditStreams|Access Key ID"
msgstr ""
msgid "AuditStreams|Active"
@@ -33644,9 +33644,6 @@ msgstr ""
msgid "Only ‘Reporter’ roles and above on tiers Premium and above can see Productivity Analytics."
msgstr ""
-msgid "Oops, are you sure?"
-msgstr ""
-
msgid "Open"
msgstr ""
diff --git a/spec/features/issues/discussion_lock_spec.rb b/spec/features/issues/discussion_lock_spec.rb
index 1b7357a774a..2ef912061e6 100644
--- a/spec/features/issues/discussion_lock_spec.rb
+++ b/spec/features/issues/discussion_lock_spec.rb
@@ -6,104 +6,240 @@ RSpec.describe 'Discussion Lock', :js, feature_category: :team_planning do
let(:user) { create(:user) }
let(:issue) { create(:issue, project: project, author: user) }
let(:project) { create(:project, :public) }
+ let(:more_dropdown) { find_by_testid('desktop-dropdown') }
+ let(:issuable_lock) { find_by_testid('issuable-lock') }
+ let(:locked_badge) { '[data-testid="locked-badge"]' }
+ let(:issuable_note_warning) { '[data-testid="issuable-note-warning"]' }
- before do
- sign_in(user)
- stub_feature_flags(moved_mr_sidebar: false)
- end
-
- context 'when a user is a team member' do
+ context 'when feature flag is disabled' do
before do
- project.add_developer(user)
+ sign_in(user)
+ stub_feature_flags(moved_mr_sidebar: false)
end
- context 'when the discussion is unlocked' do
- it 'the user can lock the issue' do
- visit project_issue_path(project, issue)
+ context 'when a user is a team member' do
+ before do
+ project.add_developer(user)
+ end
- expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ context 'when the discussion is unlocked' do
+ it 'the user can lock the issue' do
+ visit project_issue_path(project, issue)
- page.within('.issuable-sidebar') do
- find('.lock-edit').click
- click_button('Lock')
- end
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ click_button('Lock')
+ end
- expect(find('#notes')).to have_content('locked the discussion in this issue')
+ expect(find('#notes')).to have_content('locked the discussion in this issue')
+ end
end
- end
- context 'when the discussion is locked' do
- before do
- issue.update_attribute(:discussion_locked, true)
- visit project_issue_path(project, issue)
+ context 'when the discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
+ end
+
+ it 'the user can unlock the issue' do
+ expect(find('.issuable-sidebar')).to have_content('Locked')
+
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ click_button('Unlock')
+ end
+
+ expect(find('#notes')).to have_content('unlocked the discussion in this issue')
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ end
+
+ it 'the user can create a comment' do
+ page.within('#notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('div#notes')).to have_content('Some new comment')
+ end
end
+ end
- it 'the user can unlock the issue' do
- expect(find('.issuable-sidebar')).to have_content('Locked')
+ context 'when a user is not a team member' do
+ context 'when the discussion is unlocked' do
+ before do
+ visit project_issue_path(project, issue)
+ end
- page.within('.issuable-sidebar') do
- find('.lock-edit').click
- click_button('Unlock')
+ it 'the user can not lock the issue' do
+ expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
end
- expect(find('#notes')).to have_content('unlocked the discussion in this issue')
- expect(find('.issuable-sidebar')).to have_content('Unlocked')
+ it 'the user can create a comment' do
+ page.within('#notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('div#notes')).to have_content('Some new comment')
+ end
end
- it 'the user can create a comment' do
- page.within('#notes .js-main-target-form') do
- fill_in 'note[note]', with: 'Some new comment'
- click_button 'Comment'
+ context 'when the discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
end
- wait_for_requests
+ it 'the user can not unlock the issue' do
+ expect(find('.issuable-sidebar')).to have_content('Locked')
+ expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ end
- expect(find('div#notes')).to have_content('Some new comment')
+ it 'the user can not create a comment' do
+ page.within('#notes') do
+ expect(page).not_to have_selector('.js-main-target-form')
+ expect(find_by_testid('disabled-comments'))
+ .to have_content('The discussion in this issue is locked. Only project members can comment.')
+ end
+ end
end
end
- end
- context 'when a user is not a team member' do
- context 'when the discussion is unlocked' do
+ context 'for axe automated accessibility testing' do
before do
+ project.add_developer(user)
+ issue.update_attribute(:discussion_locked, true)
visit project_issue_path(project, issue)
end
- it 'the user can not lock the issue' do
- expect(find('.issuable-sidebar')).to have_content('Unlocked')
- expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ it 'passes tests' do
+ # rubocop:disable Capybara/TestidFinders -- within_testid does not work here
+ expect(page).to be_axe_clean.within(locked_badge)
+ expect(page).to be_axe_clean.within(issuable_note_warning)
+ # rubocop:enable Capybara/TestidFinders
+ page.within('.issuable-sidebar') do
+ find('.lock-edit').click
+ expect(page).to be_axe_clean.within('.lock-edit')
+ end
end
+ end
+ end
+
+ context 'when feature flag is enabled' do
+ before do
+ sign_in(user)
+ stub_feature_flags(moved_mr_sidebar: true)
+ end
+
+ context 'when a user is a team member' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'when the discussion is unlocked' do
+ it 'the user can lock the issue' do
+ visit project_issue_path(project, issue)
+
+ more_dropdown.click
+ expect(issuable_lock).to have_content('Lock discussion')
- it 'the user can create a comment' do
- page.within('#notes .js-main-target-form') do
- fill_in 'note[note]', with: 'Some new comment'
- click_button 'Comment'
+ issuable_lock.click
+ expect(find('#notes')).to have_content('locked the discussion in this issue')
+ end
+ end
+
+ context 'when the discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
end
- wait_for_requests
+ it 'the user can unlock the issue' do
+ more_dropdown.click
+ expect(issuable_lock).to have_content('Unlock discussion')
+
+ issuable_lock.click
+ expect(find('#notes')).to have_content('unlocked the discussion in this issue')
+ expect(issuable_lock).to have_content('Lock discussion')
+ end
- expect(find('div#notes')).to have_content('Some new comment')
+ it 'the user can create a comment' do
+ page.within('#notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('div#notes')).to have_content('Some new comment')
+ end
end
end
- context 'when the discussion is locked' do
- before do
- issue.update_attribute(:discussion_locked, true)
- visit project_issue_path(project, issue)
- end
+ context 'when a user is not a team member' do
+ context 'when the discussion is unlocked' do
+ before do
+ visit project_issue_path(project, issue)
+ end
- it 'the user can not unlock the issue' do
- expect(find('.issuable-sidebar')).to have_content('Locked')
- expect(find('.issuable-sidebar')).not_to have_selector('.lock-edit')
+ it 'the user can not lock the issue' do
+ more_dropdown.click
+ expect(issuable_lock).to have_content('Lock discussion')
+ end
+
+ it 'the user can create a comment' do
+ page.within('#notes .js-main-target-form') do
+ fill_in 'note[note]', with: 'Some new comment'
+ click_button 'Comment'
+ end
+
+ wait_for_requests
+
+ expect(find('div#notes')).to have_content('Some new comment')
+ end
end
- it 'the user can not create a comment' do
- page.within('#notes') do
- expect(page).not_to have_selector('js-main-target-form')
- expect(find_by_testid('disabled-comments'))
- .to have_content('The discussion in this issue is locked. Only project members can comment.')
+ context 'when the discussion is locked' do
+ before do
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
+ end
+
+ it 'the user can not unlock the issue' do
+ more_dropdown.click
+ expect(issuable_lock).to have_content('Unlock discussion')
+ end
+
+ it 'the user can not create a comment' do
+ page.within('#notes') do
+ expect(page).not_to have_selector('.js-main-target-form')
+ expect(find_by_testid('disabled-comments'))
+ .to have_content('The discussion in this issue is locked. Only project members can comment.')
+ end
end
end
end
+
+ it 'passes axe automated accessibility testing' do
+ project.add_developer(user)
+ issue.update_attribute(:discussion_locked, true)
+ visit project_issue_path(project, issue)
+ wait_for_all_requests
+
+ # rubocop:disable Capybara/TestidFinders -- within_testid does not work here
+ expect(page).to be_axe_clean.within(locked_badge)
+ expect(page).to be_axe_clean.within(issuable_note_warning)
+
+ more_dropdown.click
+ expect(page).to be_axe_clean.within('[data-testid="lock-issue-toggle"] button')
+ # rubocop:enable Capybara/TestidFinders
+ end
end
end
diff --git a/spec/lib/backup/database_configuration_spec.rb b/spec/lib/backup/database_configuration_spec.rb
new file mode 100644
index 00000000000..b7fa9f161c1
--- /dev/null
+++ b/spec/lib/backup/database_configuration_spec.rb
@@ -0,0 +1,239 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::DatabaseConfiguration, :reestablished_active_record_base, feature_category: :backup_restore do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:connection_name) { 'main' }
+
+ subject(:config) { described_class.new(connection_name) }
+
+ describe '#initialize' do
+ it 'initializes with the provided connection_name' do
+ expect_next_instance_of(described_class) do |config|
+ expect(config.connection_name).to eq(connection_name)
+ end
+
+ config
+ end
+ end
+
+ describe '#activerecord_configuration' do
+ it 'returns a ActiveRecord::DatabaseConfigurations::HashConfig' do
+ expect(config.activerecord_configuration).to be_a ActiveRecord::DatabaseConfigurations::HashConfig
+ end
+ end
+
+ context 'with configuration override feature' do
+ let(:application_config) do
+ {
+ adapter: 'postgresql',
+ host: 'some_host',
+ port: '5432'
+ }
+ end
+
+ let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
+
+ before do
+ allow(config).to receive(:original_activerecord_config).and_return(application_config)
+ end
+
+ shared_context 'with generic database with overridden values' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_PGPORT' | '1543'
+ 'GITLAB_BACKUP_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_PGSSLCOMPRESSION' | '1'
+ end
+ end
+
+ shared_context 'with generic database with overridden values using current database prefix' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_MAIN_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_MAIN_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_MAIN_PGPORT' | '1543'
+ 'GITLAB_BACKUP_MAIN_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_MAIN_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_MAIN_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_MAIN_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_MAIN_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_MAIN_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_MAIN_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_MAIN_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_MAIN_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_MAIN_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_MAIN_PGSSLCOMPRESSION' | '1'
+ end
+ end
+
+ shared_context 'with generic database with overridden values for a different database prefix' do
+ where(:env_variable, :overridden_value) do
+ 'GITLAB_BACKUP_CI_PGHOST' | 'test.invalid.'
+ 'GITLAB_BACKUP_CI_PGUSER' | 'some_user'
+ 'GITLAB_BACKUP_CI_PGPORT' | '1543'
+ 'GITLAB_BACKUP_CI_PGPASSWORD' | 'secret'
+ 'GITLAB_BACKUP_CI_PGSSLMODE' | 'allow'
+ 'GITLAB_BACKUP_CI_PGSSLKEY' | 'some_key'
+ 'GITLAB_BACKUP_CI_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_BACKUP_CI_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_BACKUP_CI_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_BACKUP_CI_PGSSLCOMPRESSION' | '1'
+ 'GITLAB_OVERRIDE_CI_PGHOST' | 'test.invalid.'
+ 'GITLAB_OVERRIDE_CI_PGUSER' | 'some_user'
+ 'GITLAB_OVERRIDE_CI_PGPORT' | '1543'
+ 'GITLAB_OVERRIDE_CI_PGPASSWORD' | 'secret'
+ 'GITLAB_OVERRIDE_CI_PGSSLMODE' | 'allow'
+ 'GITLAB_OVERRIDE_CI_PGSSLKEY' | 'some_key'
+ 'GITLAB_OVERRIDE_CI_PGSSLCERT' | '/path/to/cert'
+ 'GITLAB_OVERRIDE_CI_PGSSLROOTCERT' | '/path/to/root/cert'
+ 'GITLAB_OVERRIDE_CI_PGSSLCRL' | '/path/to/crl'
+ 'GITLAB_OVERRIDE_CI_PGSSLCOMPRESSION' | '1'
+ end
+ end
+
+ describe('#pg_env_variables') do
+ context 'with provided ENV variables' do
+ before do
+ stub_env(env_variable, overridden_value)
+ end
+
+ context 'when generic database configuration is overridden' do
+ include_context "with generic database with overridden values"
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
+
+ it 'PostgreSQL ENV overrides application configuration' do
+ expect(config.pg_env_variables).to include({ pg_env => overridden_value })
+ end
+ end
+ end
+
+ context 'when specific database configuration is overridden' do
+ context 'and environment variables are for the current database name' do
+ include_context 'with generic database with overridden values using current database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
+
+ it 'PostgreSQL ENV overrides application configuration' do
+ expect(config.pg_env_variables).to include({ pg_env => overridden_value })
+ end
+ end
+ end
+
+ context 'and environment variables are for another database' do
+ include_context 'with generic database with overridden values for a different database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 1] }
+
+ it 'PostgreSQL ENV is expected to equal application configuration' do
+ expect(config.pg_env_variables).to eq(
+ {
+ 'PGHOST' => application_config[:host],
+ 'PGPORT' => application_config[:port]
+ }
+ )
+ end
+ end
+ end
+ end
+ end
+
+ context 'when both GITLAB_BACKUP_PGUSER and GITLAB_BACKUP_MAIN_PGUSER variable are present' do
+ it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
+ stub_env('GITLAB_BACKUP_PGUSER', 'generic_user')
+ stub_env('GITLAB_BACKUP_MAIN_PGUSER', 'specific_user')
+
+ expect(config.pg_env_variables['PGUSER']).to eq('specific_user')
+ end
+ end
+ end
+
+ describe('#activerecord_variables') do
+ context 'with provided ENV variables' do
+ before do
+ stub_env(env_variable, overridden_value)
+ end
+
+ context 'when generic database configuration is overridden' do
+ include_context "with generic database with overridden values"
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
+
+ it 'ActiveRecord backup configuration overrides application configuration' do
+ expect(config.activerecord_variables).to eq(
+ application_config.merge(active_record_key => overridden_value)
+ )
+ end
+ end
+ end
+
+ context 'when specific database configuration is overridden' do
+ context 'and environment variables are for the current database name' do
+ include_context 'with generic database with overridden values using current database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
+
+ it 'ActiveRecord backup configuration overrides application configuration' do
+ expect(config.activerecord_variables).to eq(
+ application_config.merge(active_record_key => overridden_value)
+ )
+ end
+ end
+ end
+
+ context 'and environment variables are for another database' do
+ include_context 'with generic database with overridden values for a different database prefix'
+
+ with_them do
+ let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 1] }
+
+ it 'ActiveRecord backup configuration is expected to equal application configuration' do
+ expect(config.activerecord_variables).to eq(application_config)
+ end
+ end
+ end
+ end
+ end
+
+ context 'when both GITLAB_BACKUP_PGUSER and GITLAB_BACKUP_MAIN_PGUSER variable are present' do
+ with_them do
+ it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
+ stub_env('GITLAB_BACKUP_PGUSER', 'generic_user')
+ stub_env('GITLAB_BACKUP_MAIN_PGUSER', 'specific_user')
+
+ expect(config.activerecord_variables[:username]).to eq('specific_user')
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/backup/database_connection_spec.rb b/spec/lib/backup/database_connection_spec.rb
new file mode 100644
index 00000000000..b56da3d99f7
--- /dev/null
+++ b/spec/lib/backup/database_connection_spec.rb
@@ -0,0 +1,103 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::DatabaseConnection, :reestablished_active_record_base, feature_category: :backup_restore do
+ let(:connection_name) { 'main' }
+ let(:snapshot_id_pattern) { /[A-Z0-9]{8}-[A-Z0-9]{8}-[0-9]/ }
+
+ subject(:backup_connection) { described_class.new(connection_name) }
+
+ describe '#initialize' do
+ it 'initializes database_configuration with the provided connection_name' do
+ expect(Backup::DatabaseConfiguration).to receive(:new).with(connection_name).and_call_original
+
+ backup_connection
+ end
+ end
+
+ describe '#connection_name' do
+ it 'returns the same connection name used during initialization' do
+ expect(backup_connection.connection_name).to eq(connection_name)
+ end
+ end
+
+ describe '#connection' do
+ it 'is an instance of a ActiveRecord::Base.connection' do
+ backup_connection.connection.is_a? Gitlab::Database::LoadBalancing::ConnectionProxy
+ end
+ end
+
+ describe '#database_configuration' do
+ it 'returns database configuration' do
+ expect(backup_connection.database_configuration).to be_a(Backup::DatabaseConfiguration)
+ end
+ end
+
+ describe '#snapshot_id' do
+ it "returns nil when snapshot has not been triggered" do
+ expect(backup_connection.snapshot_id).to be_nil
+ end
+
+ context 'when a snapshot transaction is open', :delete do
+ let!(:snapshot_id) { backup_connection.export_snapshot! }
+
+ it 'returns the snapshot_id in the expected format' do
+ expect(backup_connection.snapshot_id).to match(snapshot_id_pattern)
+ end
+
+ it 'returns the snapshot_id equal to the one returned by #export_snapshot!' do
+ expect(backup_connection.snapshot_id).to eq(snapshot_id)
+ end
+
+ it "returns nil after a snapshot is released" do
+ backup_connection.release_snapshot!
+
+ expect(backup_connection.snapshot_id).to be_nil
+ end
+ end
+ end
+
+ describe '#export_snapshot!', :delete do
+ it 'returns a snapshot_id in the expected format' do
+ expect(backup_connection.export_snapshot!).to match(snapshot_id_pattern)
+ end
+
+ it 'opens a transaction with correct isolation format and triggers a snapshot generation' do
+ expect(backup_connection.connection).to receive(:begin_transaction).with(
+ isolation: :repeatable_read
+ ).and_call_original
+
+ expect(backup_connection.connection).to receive(:select_value).with(
+ "SELECT pg_export_snapshot()"
+ ).and_call_original
+
+ backup_connection.export_snapshot!
+ end
+
+ it 'disables transaction timeouts' do
+ expect_next_instance_of(Gitlab::Database::TransactionTimeoutSettings) do |transaction_settings|
+ expect(transaction_settings).to receive(:disable_timeouts).and_call_original
+ end
+
+ backup_connection.export_snapshot!
+ end
+ end
+
+ describe '#release_snapshot!', :delete do
+ it 'clears out existing snapshot_id' do
+ snapshot_id = backup_connection.export_snapshot!
+
+ expect { backup_connection.release_snapshot! }.to change { backup_connection.snapshot_id }
+ .from(snapshot_id).to(nil)
+ end
+
+ it 'executes a transaction rollback' do
+ backup_connection.export_snapshot!
+
+ expect(backup_connection.connection).to receive(:rollback_transaction).and_call_original
+
+ backup_connection.release_snapshot!
+ end
+ end
+end
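Taken together, the new Backup::DatabaseConnection spec describes a small snapshot lifecycle around a single ActiveRecord connection. A rough usage sketch, limited to the public methods the spec exercises (the pg_dump step in the middle is only indicated, not shown):

backup_connection = Backup::DatabaseConnection.new('main')

# Opens a REPEATABLE READ transaction with timeouts disabled and
# runs `SELECT pg_export_snapshot()` on it.
snapshot_id = backup_connection.export_snapshot!
backup_connection.snapshot_id # => same value as snapshot_id

# ... run pg_dump against the same database using that snapshot id ...

# Rolls the transaction back and clears the stored snapshot id.
backup_connection.release_snapshot!
backup_connection.snapshot_id # => nil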
diff --git a/spec/lib/backup/database_model_spec.rb b/spec/lib/backup/database_model_spec.rb
deleted file mode 100644
index 522bba80972..00000000000
--- a/spec/lib/backup/database_model_spec.rb
+++ /dev/null
@@ -1,186 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Backup::DatabaseModel, :reestablished_active_record_base, feature_category: :backup_restore do
- using RSpec::Parameterized::TableSyntax
-
- let(:gitlab_database_name) { 'main' }
-
- describe '#connection' do
- subject { described_class.new(gitlab_database_name).connection }
-
- it 'an instance of a ActiveRecord::Base.connection' do
- subject.is_a? ActiveRecord::Base.connection.class # rubocop:disable Database/MultipleDatabases
- end
- end
-
- describe '#config' do
- let(:application_config) do
- {
- adapter: 'postgresql',
- host: 'some_host',
- port: '5432'
- }
- end
-
- subject { described_class.new(gitlab_database_name).config }
-
- before do
- allow(
- Gitlab::Database.database_base_models_with_gitlab_shared[gitlab_database_name].connection_db_config
- ).to receive(:configuration_hash).and_return(application_config)
- end
-
- shared_examples 'no configuration is overridden' do
- it 'ActiveRecord backup configuration is expected to equal application configuration' do
- expect(subject[:activerecord]).to eq(application_config)
- end
-
- it 'PostgreSQL ENV is expected to equal application configuration' do
- expect(subject[:pg_env]).to eq(
- {
- 'PGHOST' => application_config[:host],
- 'PGPORT' => application_config[:port]
- }
- )
- end
- end
-
- shared_examples 'environment variables override application configuration' do
- let(:active_record_key) { described_class::SUPPORTED_OVERRIDES.invert[pg_env] }
-
- it 'ActiveRecord backup configuration overrides application configuration' do
- expect(subject[:activerecord]).to eq(application_config.merge(active_record_key => overridden_value))
- end
-
- it 'PostgreSQL ENV overrides application configuration' do
- expect(subject[:pg_env]).to include({ pg_env => overridden_value })
- end
- end
-
- context 'when no GITLAB_BACKUP_PG* variables are set' do
- it_behaves_like 'no configuration is overridden'
- end
-
- context 'when generic database configuration is overridden' do
- where(:env_variable, :overridden_value) do
- 'GITLAB_BACKUP_PGHOST' | 'test.invalid.'
- 'GITLAB_BACKUP_PGUSER' | 'some_user'
- 'GITLAB_BACKUP_PGPORT' | '1543'
- 'GITLAB_BACKUP_PGPASSWORD' | 'secret'
- 'GITLAB_BACKUP_PGSSLMODE' | 'allow'
- 'GITLAB_BACKUP_PGSSLKEY' | 'some_key'
- 'GITLAB_BACKUP_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_BACKUP_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_BACKUP_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_BACKUP_PGSSLCOMPRESSION' | '1'
- 'GITLAB_OVERRIDE_PGHOST' | 'test.invalid.'
- 'GITLAB_OVERRIDE_PGUSER' | 'some_user'
- 'GITLAB_OVERRIDE_PGPORT' | '1543'
- 'GITLAB_OVERRIDE_PGPASSWORD' | 'secret'
- 'GITLAB_OVERRIDE_PGSSLMODE' | 'allow'
- 'GITLAB_OVERRIDE_PGSSLKEY' | 'some_key'
- 'GITLAB_OVERRIDE_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_OVERRIDE_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_OVERRIDE_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_OVERRIDE_PGSSLCOMPRESSION' | '1'
- end
-
- with_them do
- let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_(\w+)/, 2] }
-
- before do
- stub_env(env_variable, overridden_value)
- end
-
- it_behaves_like 'environment variables override application configuration'
- end
- end
-
- context 'when specific database configuration is overridden' do
- context 'and environment variables are for the current database name' do
- where(:env_variable, :overridden_value) do
- 'GITLAB_BACKUP_MAIN_PGHOST' | 'test.invalid.'
- 'GITLAB_BACKUP_MAIN_PGUSER' | 'some_user'
- 'GITLAB_BACKUP_MAIN_PGPORT' | '1543'
- 'GITLAB_BACKUP_MAIN_PGPASSWORD' | 'secret'
- 'GITLAB_BACKUP_MAIN_PGSSLMODE' | 'allow'
- 'GITLAB_BACKUP_MAIN_PGSSLKEY' | 'some_key'
- 'GITLAB_BACKUP_MAIN_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_BACKUP_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_BACKUP_MAIN_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_BACKUP_MAIN_PGSSLCOMPRESSION' | '1'
- 'GITLAB_OVERRIDE_MAIN_PGHOST' | 'test.invalid.'
- 'GITLAB_OVERRIDE_MAIN_PGUSER' | 'some_user'
- 'GITLAB_OVERRIDE_MAIN_PGPORT' | '1543'
- 'GITLAB_OVERRIDE_MAIN_PGPASSWORD' | 'secret'
- 'GITLAB_OVERRIDE_MAIN_PGSSLMODE' | 'allow'
- 'GITLAB_OVERRIDE_MAIN_PGSSLKEY' | 'some_key'
- 'GITLAB_OVERRIDE_MAIN_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_OVERRIDE_MAIN_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_OVERRIDE_MAIN_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_OVERRIDE_MAIN_PGSSLCOMPRESSION' | '1'
- end
-
- with_them do
- let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_MAIN_(\w+)/, 2] }
-
- before do
- stub_env(env_variable, overridden_value)
- end
-
- it_behaves_like 'environment variables override application configuration'
- end
- end
-
- context 'and environment variables are for another database' do
- where(:env_variable, :overridden_value) do
- 'GITLAB_BACKUP_CI_PGHOST' | 'test.invalid.'
- 'GITLAB_BACKUP_CI_PGUSER' | 'some_user'
- 'GITLAB_BACKUP_CI_PGPORT' | '1543'
- 'GITLAB_BACKUP_CI_PGPASSWORD' | 'secret'
- 'GITLAB_BACKUP_CI_PGSSLMODE' | 'allow'
- 'GITLAB_BACKUP_CI_PGSSLKEY' | 'some_key'
- 'GITLAB_BACKUP_CI_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_BACKUP_CI_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_BACKUP_CI_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_BACKUP_CI_PGSSLCOMPRESSION' | '1'
- 'GITLAB_OVERRIDE_CI_PGHOST' | 'test.invalid.'
- 'GITLAB_OVERRIDE_CI_PGUSER' | 'some_user'
- 'GITLAB_OVERRIDE_CI_PGPORT' | '1543'
- 'GITLAB_OVERRIDE_CI_PGPASSWORD' | 'secret'
- 'GITLAB_OVERRIDE_CI_PGSSLMODE' | 'allow'
- 'GITLAB_OVERRIDE_CI_PGSSLKEY' | 'some_key'
- 'GITLAB_OVERRIDE_CI_PGSSLCERT' | '/path/to/cert'
- 'GITLAB_OVERRIDE_CI_PGSSLROOTCERT' | '/path/to/root/cert'
- 'GITLAB_OVERRIDE_CI_PGSSLCRL' | '/path/to/crl'
- 'GITLAB_OVERRIDE_CI_PGSSLCOMPRESSION' | '1'
- end
-
- with_them do
- let(:pg_env) { env_variable[/GITLAB_(BACKUP|OVERRIDE)_CI_(\w+)/, 1] }
-
- before do
- stub_env(env_variable, overridden_value)
- end
-
- it_behaves_like 'no configuration is overridden'
- end
- end
-
- context 'when both GITLAB_BACKUP_PGUSER and GITLAB_BACKUP_MAIN_PGUSER variable are present' do
- before do
- stub_env('GITLAB_BACKUP_PGUSER', 'generic_user')
- stub_env('GITLAB_BACKUP_MAIN_PGUSER', 'specfic_user')
- end
-
- it 'prefers more specific GITLAB_BACKUP_MAIN_PGUSER' do
- config = subject
- expect(config.dig(:activerecord, :username)).to eq('specfic_user')
- expect(config.dig(:pg_env, 'PGUSER')).to eq('specfic_user')
- end
- end
- end
- end
-end
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index 26d09c275c1..86468689f76 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -48,28 +48,16 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
it 'uses snapshots' do
Dir.mktmpdir do |dir|
- expect_next_instances_of(Backup::DatabaseModel, 2) do |adapter|
- expect(adapter.connection).to receive(:begin_transaction).with(
- isolation: :repeatable_read
- ).and_call_original
- expect(adapter.connection).to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_call_original
- expect(adapter.connection).to receive(:rollback_transaction).and_call_original
- end
+ expect_next_instances_of(Backup::DatabaseConnection, 2) do |backup_connection|
+ expect(backup_connection).to receive(:export_snapshot!).and_call_original
- subject.dump(dir, backup_id)
- end
- end
+ expect_next_instance_of(::Gitlab::Backup::Cli::Utils::PgDump) do |pgdump|
+ expect(pgdump.snapshot_id).to eq(backup_connection.snapshot_id)
+ end
- it 'disables transaction time out' do
- number_of_databases = base_models_for_backup.count
- expect(Gitlab::Database::TransactionTimeoutSettings)
- .to receive(:new).exactly(2 * number_of_databases).times.and_return(timeout_service)
- expect(timeout_service).to receive(:disable_timeouts).exactly(number_of_databases).times
- expect(timeout_service).to receive(:restore_timeouts).exactly(number_of_databases).times
+ expect(backup_connection).to receive(:release_snapshot!).and_call_original
+ end
- Dir.mktmpdir do |dir|
subject.dump(dir, backup_id)
end
end
@@ -82,79 +70,18 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
it 'does not use snapshots' do
Dir.mktmpdir do |dir|
- base_model = Backup::DatabaseModel.new('main')
- expect(base_model.connection).not_to receive(:begin_transaction).with(
- isolation: :repeatable_read
- ).and_call_original
- expect(base_model.connection).not_to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_call_original
- expect(base_model.connection).not_to receive(:rollback_transaction).and_call_original
-
- subject.dump(dir, backup_id)
- end
- end
- end
-
- describe 'pg_dump arguments' do
- let(:snapshot_id) { 'fake_id' }
- let(:default_pg_args) do
- args = [
- '--clean',
- '--if-exists'
- ]
-
- if Gitlab::Database.database_mode == Gitlab::Database::MODE_MULTIPLE_DATABASES
- args + ["--snapshot=#{snapshot_id}"]
- else
- args
- end
- end
+ expect_next_instance_of(Backup::DatabaseConnection) do |backup_connection|
+ expect(backup_connection).not_to receive(:export_snapshot!)
- let(:dumper) { double }
- let(:destination_dir) { 'tmp' }
-
- before do
- allow(Backup::Dump::Postgres).to receive(:new).and_return(dumper)
- allow(dumper).to receive(:dump).with(any_args).and_return(true)
- end
-
- shared_examples 'pg_dump arguments' do
- it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do
- number_of_databases = base_models_for_backup.count
- if number_of_databases > 1
- expect_next_instances_of(Backup::DatabaseModel, number_of_databases) do |model|
- expect(model.connection).to receive(:select_value).with(
- "SELECT pg_export_snapshot()"
- ).and_return(snapshot_id)
+ expect_next_instance_of(::Gitlab::Backup::Cli::Utils::PgDump) do |pgdump|
+ expect(pgdump.snapshot_id).to be_nil
end
- end
-
- expect(dumper).to receive(:dump).with(anything, anything, expected_pg_args)
- subject.dump(destination_dir, backup_id)
- end
- end
-
- context 'when no PostgreSQL schemas are specified' do
- let(:expected_pg_args) { default_pg_args }
-
- include_examples 'pg_dump arguments'
- end
-
- context 'when a PostgreSQL schema is used' do
- let(:schema) { 'gitlab' }
- let(:expected_pg_args) do
- default_pg_args + ['-n', schema] + Gitlab::Database::EXTRA_SCHEMAS.flat_map do |schema|
- ['-n', schema.to_s]
+ expect(backup_connection).not_to receive(:release_snapshot!)
end
- end
- before do
- allow(Gitlab.config.backup).to receive(:pg_schema).and_return(schema)
+ subject.dump(dir, backup_id)
end
-
- include_examples 'pg_dump arguments'
end
end
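The rewritten Backup::Database spec no longer stubs raw connections and pg_dump arguments; it asserts that Backup::DatabaseConnection provides (or skips) a snapshot and that the snapshot id ends up on the Gitlab::Backup::Cli::Utils::PgDump object. A hedged sketch of the flow those expectations imply; only the database_name: keyword and the dump(file, pg_dump) call appear in the specs, while the snapshot_id: keyword and the multiple_databases? check are assumptions:

connection  = Backup::DatabaseConnection.new('main')
snapshot_id = multiple_databases? ? connection.export_snapshot! : nil

pg_dump = Gitlab::Backup::Cli::Utils::PgDump.new(
  database_name: 'gitlabhq_production',
  snapshot_id: snapshot_id # assumed keyword; nil when snapshots are not used
)

Backup::Dump::Postgres.new.dump('db/database.sql.gz', pg_dump)

connection.release_snapshot! if snapshot_id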
diff --git a/spec/lib/backup/dump/postgres_spec.rb b/spec/lib/backup/dump/postgres_spec.rb
index f7020c78c6c..1da2ee950db 100644
--- a/spec/lib/backup/dump/postgres_spec.rb
+++ b/spec/lib/backup/dump/postgres_spec.rb
@@ -3,17 +3,22 @@
require 'spec_helper'
RSpec.describe Backup::Dump::Postgres, feature_category: :backup_restore do
- describe '#dump' do
- let(:pg_database) { 'gitlabhq_test' }
- let(:destination_dir) { Dir.mktmpdir }
- let(:db_file_name) { File.join(destination_dir, 'output.gz') }
+ let(:pg_database) { 'gitlabhq_test' }
+ let(:pg_dump) { ::Gitlab::Backup::Cli::Utils::PgDump.new(database_name: pg_database) }
+ let(:default_compression_cmd) { 'gzip -c -1' }
- let(:pipes) { IO.pipe }
- let(:gzip_pid) { spawn('gzip -c -1', in: pipes[0], out: [db_file_name, 'w', 0o600]) }
- let(:pg_dump_pid) { Process.spawn('pg_dump', *args, pg_database, out: pipes[1]) }
- let(:args) { ['--help'] }
+ subject(:postgres) { described_class.new }
- subject { described_class.new }
+ describe '#compress_cmd' do
+ it 'returns default compression command' do
+ expect(postgres.compress_cmd).to eq(default_compression_cmd)
+ end
+ end
+
+ describe '#dump' do
+ let(:pipes) { IO.pipe }
+ let(:destination_dir) { Dir.mktmpdir }
+ let(:dump_file_name) { File.join(destination_dir, 'output.gz') }
before do
allow(IO).to receive(:pipe).and_return(pipes)
@@ -23,33 +28,54 @@ RSpec.describe Backup::Dump::Postgres, feature_category: :backup_restore do
FileUtils.remove_entry destination_dir
end
- it 'creates gzipped dump using supplied arguments' do
- expect(subject).to receive(:spawn).with('gzip -c -1', in: pipes.first,
- out: [db_file_name, 'w', 0o600]).and_return(gzip_pid)
- expect(Process).to receive(:spawn).with('pg_dump', *args, pg_database, out: pipes[1]).and_return(pg_dump_pid)
+ context 'with default compression method' do
+ it 'creates a dump file' do
+ postgres.dump(dump_file_name, pg_dump)
- subject.dump(pg_database, db_file_name, args)
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
+
+ it 'uses the default compression command' do
+ compressor_pid = spawn(default_compression_cmd, in: pipes[0], out: [dump_file_name, 'w', 0o600])
+
+ expect(postgres).to receive(:spawn).with(
+ default_compression_cmd,
+ in: pipes.first,
+ out: [dump_file_name, 'w', 0o600]).and_return(compressor_pid)
- expect(File.exist?(db_file_name)).to eq(true)
+ postgres.dump(dump_file_name, pg_dump)
+
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
end
context 'when COMPRESS_CMD is set to tee' do
- let(:tee_pid) { spawn('tee', in: pipes[0], out: [db_file_name, 'w', 0o600]) }
+ let(:tee_pid) { spawn('tee', in: pipes[0], out: [dump_file_name, 'w', 0o600]) }
before do
stub_env('COMPRESS_CMD', 'tee')
end
+ it 'creates a dump file' do
+ postgres.dump(dump_file_name, pg_dump)
+
+ expect(File.exist?(dump_file_name)).to eq(true)
+ end
+
it 'passes through tee instead of gzip' do
- expect(subject).to receive(:spawn).with('tee', in: pipes.first,
- out: [db_file_name, 'w', 0o600]).and_return(tee_pid)
- expect(Process).to receive(:spawn).with('pg_dump', *args, pg_database, out: pipes[1]).and_return(pg_dump_pid)
+ custom_compression_command = 'tee'
+ compressor_pid = spawn(custom_compression_command, in: pipes[0], out: [dump_file_name, 'w', 0o600])
+
+ expect(postgres).to receive(:spawn).with(
+ custom_compression_command,
+ in: pipes.first,
+ out: [dump_file_name, 'w', 0o600]).and_return(compressor_pid)
expect do
- subject.dump(pg_database, db_file_name, args)
+ postgres.dump(dump_file_name, pg_dump)
end.to output(/Using custom COMPRESS_CMD 'tee'/).to_stdout
- expect(File.exist?(db_file_name)).to eq(true)
+ expect(File.exist?(dump_file_name)).to eq(true)
end
end
end
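The updated Backup::Dump::Postgres spec shows the dump being written through a compressor process: compress_cmd defaults to 'gzip -c -1' and can be replaced via the COMPRESS_CMD environment variable, and #dump now receives the destination file plus a PgDump object instead of a database name and raw arguments. A minimal sketch of the pipe-and-spawn pattern the spec stubs (not the actual implementation; pg_dump is invoked directly here rather than through the PgDump helper):

read_io, write_io = IO.pipe
compress_cmd = ENV.fetch('COMPRESS_CMD', 'gzip -c -1')

# Compressor reads from the pipe and writes the dump file with 0600 permissions.
compressor_pid = spawn(compress_cmd, in: read_io, out: ['output.gz', 'w', 0o600])

# pg_dump writes the plain SQL dump into the other end of the pipe.
pg_dump_pid = Process.spawn('pg_dump', 'gitlabhq_test', out: write_io)

# Close the parent's copies so the compressor sees EOF once pg_dump exits.
read_io.close
write_io.close

Process.waitpid(pg_dump_pid)
Process.waitpid(compressor_pid)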
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 08679626c21..eeb0bbb8e7d 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -2224,46 +2224,82 @@ RSpec.describe Repository, feature_category: :source_code_management do
describe 'rolling back the `rebase_commit_sha`' do
let(:new_sha) { Digest::SHA1.hexdigest('foo') }
- it 'does not rollback when there are no errors' do
- second_response = double(pre_receive_error: nil, git_error: nil)
- mock_gitaly(second_response)
+ context 'when there are no errors' do
+ before do
+ responses = [
+ double(rebase_sha: new_sha).as_null_object,
+ double
+ ]
+
+ expect_any_instance_of(
+ Gitaly::OperationService::Stub
+ ).to receive(:user_rebase_confirmable).and_return(responses.each)
+ end
- repository.rebase(user, merge_request)
+ it 'does not rollback when there are no errors' do
+ repository.rebase(user, merge_request)
- expect(merge_request.reload.rebase_commit_sha).to eq(new_sha)
+ expect(merge_request.reload.rebase_commit_sha).to eq(new_sha)
+ end
end
- it 'does rollback when a PreReceiveError is encountered in the second step' do
- second_response = double(pre_receive_error: 'my_error', git_error: nil)
- mock_gitaly(second_response)
+ context 'when there was an error' do
+ let(:first_response) do
+ double(rebase_sha: new_sha).as_null_object
+ end
- expect do
- repository.rebase(user, merge_request)
- end.to raise_error(Gitlab::Git::PreReceiveError)
+ before do
+ request_enum = double(push: nil).as_null_object
+ allow(Gitlab::GitalyClient::QueueEnumerator).to receive(:new).and_return(request_enum)
- expect(merge_request.reload.rebase_commit_sha).to be_nil
- end
+ expect_any_instance_of(
+ Gitaly::OperationService::Stub
+ ).to receive(:user_rebase_confirmable).and_return(first_response)
- it 'does rollback when a GitError is encountered in the second step' do
- second_response = double(pre_receive_error: nil, git_error: 'git error')
- mock_gitaly(second_response)
+ # Faking second request failure
+ allow(request_enum).to receive(:push)
+ .with(Gitaly::UserRebaseConfirmableRequest.new(apply: true)) { raise(error) }
+ end
- expect do
- repository.rebase(user, merge_request)
- end.to raise_error(Gitlab::Git::Repository::GitError)
+ context 'when a PreReceiveError is encountered in the second step' do
+ let(:error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserRebaseConfirmableError.new(
+ access_check: Gitaly::AccessCheckError.new(
+ error_message: 'something went wrong'
+ )))
+ end
- expect(merge_request.reload.rebase_commit_sha).to be_nil
- end
+ it 'does rollback' do
+ expect do
+ repository.rebase(user, merge_request)
+ end.to raise_error(Gitlab::Git::PreReceiveError)
- def mock_gitaly(second_response)
- responses = [
- double(rebase_sha: new_sha).as_null_object,
- second_response
- ]
+ expect(merge_request.reload.rebase_commit_sha).to be_nil
+ end
+ end
- expect_any_instance_of(
- Gitaly::OperationService::Stub
- ).to receive(:user_rebase_confirmable).and_return(responses.each)
+ context 'when a GitError is encountered in the second step' do
+ let(:error) do
+ new_detailed_error(
+ GRPC::Core::StatusCodes::INTERNAL,
+ 'something failed',
+ Gitaly::UserSquashError.new(
+ rebase_conflict: Gitaly::MergeConflictError.new(
+ conflicting_files: ['conflicting-file']
+ )))
+ end
+
+ it 'does rollback' do
+ expect do
+ repository.rebase(user, merge_request)
+ end.to raise_error(Gitlab::Git::Repository::GitError)
+
+ expect(merge_request.reload.rebase_commit_sha).to be_nil
+ end
+ end
end
end
end
diff --git a/spec/views/profiles/keys/_form.html.haml_spec.rb b/spec/views/profiles/keys/_form.html.haml_spec.rb
index dd8af14100a..f427804d005 100644
--- a/spec/views/profiles/keys/_form.html.haml_spec.rb
+++ b/spec/views/profiles/keys/_form.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'profiles/keys/_form.html.haml' do
+RSpec.describe 'profiles/keys/_form.html.haml', feature_category: :system_access do
include SshKeysHelper
let_it_be(:key) { Key.new }
@@ -44,7 +44,7 @@ RSpec.describe 'profiles/keys/_form.html.haml' do
end
it 'has the validation warning', :aggregate_failures do
- expect(rendered).to have_text("Oops, are you sure? Publicly visible private SSH keys can compromise your system.")
+ expect(rendered).to have_text("Are you sure? Publicly visible private SSH keys can compromise your system.")
expect(rendered).to have_button('Yes, add it')
end
diff --git a/spec/workers/process_commit_worker_spec.rb b/spec/workers/process_commit_worker_spec.rb
index 02221285ad3..956e29ec7f4 100644
--- a/spec/workers/process_commit_worker_spec.rb
+++ b/spec/workers/process_commit_worker_spec.rb
@@ -3,180 +3,186 @@
require 'spec_helper'
RSpec.describe ProcessCommitWorker, feature_category: :source_code_management do
- let(:worker) { described_class.new }
- let(:user) { create(:user) }
+ let_it_be(:user) { create(:user) }
+
let(:project) { create(:project, :public, :repository) }
let(:issue) { create(:issue, project: project, author: user) }
let(:commit) { project.commit }
+ let(:worker) { described_class.new }
+
it "is deduplicated" do
expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
expect(described_class.get_deduplication_options).to include(feature_flag: :deduplicate_process_commit_worker)
end
describe '#perform' do
- it 'does not process the commit when the project does not exist' do
- expect(worker).not_to receive(:close_issues)
+ subject(:perform) { worker.perform(project_id, user_id, commit.to_hash, default) }
- worker.perform(-1, user.id, commit.to_hash)
- end
-
- it 'does not process the commit when the user does not exist' do
- expect(worker).not_to receive(:close_issues)
+ let(:project_id) { project.id }
+ let(:user_id) { user.id }
- worker.perform(project.id, -1, commit.to_hash)
+ before do
+ allow(Commit).to receive(:build_from_sidekiq_hash).and_return(commit)
end
- include_examples 'an idempotent worker' do
- subject do
- perform_multiple([project.id, user.id, commit.to_hash], worker: worker)
- end
-
- it 'processes the commit message' do
- expect(worker).to receive(:process_commit_message)
- .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES)
- .and_call_original
+ context 'when pushing to the default branch' do
+ let(:default) { true }
- subject
- end
+ context 'when project does not exist' do
+ let(:project_id) { -1 }
- it 'updates the issue metrics' do
- expect(worker).to receive(:update_issue_metrics)
- .exactly(IdempotentWorkerHelper::WORKER_EXEC_TIMES)
- .and_call_original
+ it 'does not close related issues' do
+ expect { perform }.to change { Issues::CloseWorker.jobs.size }.by(0)
- subject
+ perform
+ end
end
- end
- end
- describe '#process_commit_message' do
- context 'when pushing to the default branch' do
- before do
- allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
- end
+ context 'when user does not exist' do
+ let(:user_id) { -1 }
- it 'closes issues that should be closed per the commit message' do
- expect(worker).to receive(:close_issues).with(project, user, user, commit, [issue])
+ it 'does not close related issues' do
+ expect { perform }.not_to change { Issues::CloseWorker.jobs.size }
- worker.process_commit_message(project, commit, user, user, true)
+ perform
+ end
end
- it 'creates cross references' do
- expect(commit).to receive(:create_cross_references!).with(user, [issue])
-
- worker.process_commit_message(project, commit, user, user, true)
- end
- end
+ include_examples 'an idempotent worker' do
+ before do
+ allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
+ issue.metrics.update!(first_mentioned_in_commit_at: nil)
+ end
- context 'when pushing to a non-default branch' do
- it 'does not close any issues' do
- allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
+ subject do
+ perform_multiple([project.id, user.id, commit.to_hash], worker: worker)
+ end
- expect(worker).not_to receive(:close_issues)
+ it 'closes related issues' do
+ expect { perform }.to change { Issues::CloseWorker.jobs.size }.by(1)
- worker.process_commit_message(project, commit, user, user, false)
+ subject
+ end
end
- it 'does not create cross references' do
- expect(commit).to receive(:create_cross_references!).with(user, [])
+ context 'when commit is not a merge request merge commit' do
+ context 'when commit has issue reference' do
+ before do
+ allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
+ end
+
+ it 'closes issues that should be closed per the commit message' do
+ expect { perform }.to change { Issues::CloseWorker.jobs.size }.by(1)
+ end
+
+ it 'creates cross references' do
+ expect(commit).to receive(:create_cross_references!).with(user, [issue])
+
+ perform
+ end
+
+ describe 'issue metrics', :clean_gitlab_redis_cache do
+ context 'when issue has no first_mentioned_in_commit_at set' do
+ before do
+ issue.metrics.update!(first_mentioned_in_commit_at: nil)
+ end
+
+ it 'updates issue metrics' do
+ expect { perform }.to change { issue.metrics.reload.first_mentioned_in_commit_at }
+ .to(commit.committed_date)
+ end
+ end
+
+ context 'when issue has first_mentioned_in_commit_at earlier than given committed_date' do
+ before do
+ issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date - 1.day)
+ end
+
+ it "doesn't update issue metrics" do
+ expect { perform }.not_to change { issue.metrics.reload.first_mentioned_in_commit_at }
+ end
+ end
+
+ context 'when issue has first_mentioned_in_commit_at later than given committed_date' do
+ before do
+ issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date + 1.day)
+ end
+
+ it 'updates issue metrics' do
+ expect { perform }.to change { issue.metrics.reload.first_mentioned_in_commit_at }
+ .to(commit.committed_date)
+ end
+ end
+ end
+ end
- worker.process_commit_message(project, commit, user, user, false)
- end
- end
+ context 'when commit has no issue references' do
+ before do
+ allow(commit).to receive(:safe_message).and_return("Lorem Ipsum")
+ end
- context 'when commit is a merge request merge commit to the default branch' do
- let(:merge_request) do
- create(
- :merge_request,
- description: "Closes #{issue.to_reference}",
- source_branch: 'feature-merged',
- target_branch: 'master',
- source_project: project
- )
+ describe 'issue metrics' do
+ it "doesn't execute any queries with false conditions" do
+ expect { perform }.not_to make_queries_matching(/WHERE (?:1=0|0=1)/)
+ end
+ end
+ end
end
- let(:commit) do
- project.repository.create_branch('feature-merged', 'feature')
- project.repository.after_create_branch
+ context 'when commit is a merge request merge commit' do
+ let(:merge_request) do
+ create(
+ :merge_request,
+ description: "Closes #{issue.to_reference}",
+ source_branch: 'feature-merged',
+ target_branch: 'master',
+ source_project: project
+ )
+ end
- MergeRequests::MergeService
- .new(project: project, current_user: merge_request.author, params: { sha: merge_request.diff_head_sha })
- .execute(merge_request)
+ let(:commit) do
+ project.repository.create_branch('feature-merged', 'feature')
+ project.repository.after_create_branch
- merge_request.reload.merge_commit
- end
+ MergeRequests::MergeService
+ .new(project: project, current_user: merge_request.author, params: { sha: merge_request.diff_head_sha })
+ .execute(merge_request)
- it 'does not close any issues from the commit message' do
- expect(worker).not_to receive(:close_issues)
+ merge_request.reload.merge_commit
+ end
- worker.process_commit_message(project, commit, user, user, true)
- end
+ it 'does not close any issues from the commit message' do
+ expect { perform }.not_to change { Issues::CloseWorker.jobs.size }
- it 'still creates cross references' do
- expect(commit).to receive(:create_cross_references!).with(user, [])
+ perform
+ end
- worker.process_commit_message(project, commit, user, user, true)
- end
- end
- end
+ it 'still creates cross references' do
+ expect(commit).to receive(:create_cross_references!).with(commit.author, [])
- describe '#close_issues' do
- it 'creates Issue::CloseWorker jobs' do
- expect do
- worker.close_issues(project, user, user, commit, [issue])
- end.to change { Issues::CloseWorker.jobs.size }.by(1)
+ perform
+ end
+ end
end
- end
- describe '#update_issue_metrics', :clean_gitlab_redis_cache do
- context 'when commit has issue reference' do
- subject(:update_metrics_and_reload) do
- -> {
- worker.update_issue_metrics(commit, user)
- issue.metrics.reload
- }
- end
+ context 'when pushing to a non-default branch' do
+ let(:default) { false }
before do
allow(commit).to receive(:safe_message).and_return("Closes #{issue.to_reference}")
end
- context 'when issue has no first_mentioned_in_commit_at set' do
- it 'updates issue metrics' do
- expect { update_metrics_and_reload.call }
- .to change { issue.metrics.first_mentioned_in_commit_at }.to(commit.committed_date)
- end
- end
-
- context 'when issue has first_mentioned_in_commit_at earlier than given committed_date' do
- before do
- issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date - 1.day)
- end
-
- it "doesn't update issue metrics" do
- expect { update_metrics_and_reload.call }.not_to change { issue.metrics.first_mentioned_in_commit_at }
- end
- end
-
- context 'when issue has first_mentioned_in_commit_at later than given committed_date' do
- before do
- issue.metrics.update!(first_mentioned_in_commit_at: commit.committed_date + 1.day)
- end
+ it 'does not close any issues from the commit message' do
+ expect { perform }.not_to change { Issues::CloseWorker.jobs.size }
- it "doesn't update issue metrics" do
- expect { update_metrics_and_reload.call }
- .to change { issue.metrics.first_mentioned_in_commit_at }.to(commit.committed_date)
- end
+ perform
end
- end
- context 'when commit has no issue references' do
- it "doesn't execute any queries with false conditions" do
- allow(commit).to receive(:safe_message).and_return("Lorem Ipsum")
+ it 'still creates cross references' do
+ expect(commit).to receive(:create_cross_references!).with(user, [])
- expect { worker.update_issue_metrics(commit, user) }
- .not_to make_queries_matching(/WHERE (?:1=0|0=1)/)
+ perform
end
end
end