gitlab.com/gitlab-org/gitlab-foss.git
-rw-r--r--  .gitlab/ci/rails.gitlab-ci.yml | 2
-rw-r--r--  .gitlab/ci/rules.gitlab-ci.yml | 2
-rw-r--r--  .gitlab/ci/static-analysis.gitlab-ci.yml | 10
-rw-r--r--  GITALY_SERVER_VERSION | 2
-rw-r--r--  GITLAB_ELASTICSEARCH_INDEXER_VERSION | 2
-rw-r--r--  app/controllers/search_controller.rb | 12
-rw-r--r--  app/graphql/resolvers/ci/runner_groups_resolver.rb | 2
-rw-r--r--  app/graphql/types/ci/runner_type.rb | 7
-rw-r--r--  app/services/bulk_imports/create_service.rb | 2
-rw-r--r--  app/views/search/_category.html.haml | 2
-rw-r--r--  app/views/search/_results.html.haml | 4
-rw-r--r--  app/views/search/_results_status.html.haml | 8
-rw-r--r--  app/views/search/_results_status_horiz_nav.html.haml | 20
-rw-r--r--  app/views/search/_results_status_vert_nav.html.haml | 20
-rw-r--r--  app/views/search/show.html.haml | 2
-rw-r--r--  config/feature_flags/development/send_traversal_ids_to_indexer.yml | 8
-rw-r--r--  doc/api/graphql/reference/index.md | 2
-rw-r--r--  lib/backup/database.rb | 189
-rw-r--r--  lib/backup/dump/postgres.rb | 22
-rw-r--r--  lib/backup/manager.rb | 27
-rw-r--r--  lib/tasks/gitlab/backup.rake | 6
-rw-r--r--  locale/gitlab.pot | 9
-rw-r--r--  scripts/allowed_warnings.txt | 11
-rwxr-xr-x  scripts/static-analysis | 28
-rw-r--r--  scripts/utils.sh | 31
-rw-r--r--  spec/fixtures/database.sql.gz | bin 0 -> 30 bytes
-rw-r--r--  spec/lib/backup/database_spec.rb | 183
-rw-r--r--  spec/lib/backup/dump/postgres_spec.rb | 36
-rw-r--r--  spec/lib/backup/manager_spec.rb | 15
-rw-r--r--  spec/models/concerns/triggerable_hooks_spec.rb | 2
-rw-r--r--  spec/services/bulk_imports/create_service_spec.rb | 18
-rw-r--r--  spec/tasks/gitlab/backup_rake_spec.rb | 12
-rw-r--r--  spec/views/search/_results.html.haml_spec.rb | 9
-rw-r--r--  spec/views/search/show.html.haml_spec.rb | 2
34 files changed, 471 insertions, 236 deletions
diff --git a/.gitlab/ci/rails.gitlab-ci.yml b/.gitlab/ci/rails.gitlab-ci.yml
index 2f02ff6a3fe..38f474160bc 100644
--- a/.gitlab/ci/rails.gitlab-ci.yml
+++ b/.gitlab/ci/rails.gitlab-ci.yml
@@ -333,7 +333,7 @@ rspec:deprecations:
script:
- grep -h -R "keyword" deprecations/ | awk '{$1=$1};1' | sort | uniq -c | sort
- grep -R "keyword" deprecations/ | wc
- - run_timed_command "bundle exec rubocop --only Lint/LastKeywordArgument --parallel"
+ - run_timed_command "fail_on_warnings bundle exec rubocop --only Lint/LastKeywordArgument --parallel"
artifacts:
expire_in: 31d
when: always
diff --git a/.gitlab/ci/rules.gitlab-ci.yml b/.gitlab/ci/rules.gitlab-ci.yml
index 047a2f8ae74..410dcec1450 100644
--- a/.gitlab/ci/rules.gitlab-ci.yml
+++ b/.gitlab/ci/rules.gitlab-ci.yml
@@ -92,7 +92,7 @@
if: '$CI_MERGE_REQUEST_LABELS =~ /group::global search/'
.if-merge-request-labels-pipeline-expedite: &if-merge-request-labels-pipeline-expedite
- if: '$CI_MERGE_REQUEST_LABELS =~ /master:(foss-)?broken/ && $CI_MERGE_REQUEST_LABELS =~ /pipeline:expedite/'
+ if: '($CI_MERGE_REQUEST_LABELS =~ /master:(foss-)?broken/ || $CI_MERGE_REQUEST_TITLE =~ /^[Rr]evert/) && $CI_MERGE_REQUEST_LABELS =~ /pipeline:expedite/'
.if-merge-request-labels-frontend-and-feature-flag: &if-merge-request-labels-frontend-and-feature-flag
if: '$CI_MERGE_REQUEST_LABELS =~ /frontend/ && $CI_MERGE_REQUEST_LABELS =~ /feature flag/'
diff --git a/.gitlab/ci/static-analysis.gitlab-ci.yml b/.gitlab/ci/static-analysis.gitlab-ci.yml
index 0a310691cd7..13013d9a9db 100644
--- a/.gitlab/ci/static-analysis.gitlab-ci.yml
+++ b/.gitlab/ci/static-analysis.gitlab-ci.yml
@@ -22,7 +22,7 @@ update-static-analysis-cache:
# Silence cop offenses for rules with "grace period".
# This will notify Slack if offenses were silenced.
# For the moment we only cache `tmp/rubocop_cache` so we don't need to run all the tasks.
- - run_timed_command "bundle exec rake rubocop:check:graceful"
+ - run_timed_command "fail_on_warnings bundle exec rake rubocop:check:graceful"
static-analysis:
extends:
@@ -32,7 +32,7 @@ static-analysis:
parallel: 2
script:
- yarn_install_script
- - scripts/static-analysis
+ - fail_on_warnings scripts/static-analysis
static-analysis as-if-foss:
extends:
@@ -132,12 +132,12 @@ rubocop:
# We won't notify Slack if offenses were silenced to avoid frequent messages.
# Job `update-static-analysis-cache` takes care of Slack notifications every 2 hours.
unset CI_SLACK_WEBHOOK_URL
- run_timed_command "bundle exec rake rubocop:check:graceful"
+ run_timed_command "fail_on_warnings bundle exec rake rubocop:check:graceful"
else
cat "${RSPEC_CHANGED_FILES_PATH}" | ruby -e 'print $stdin.read.split(" ").select { |f| File.exist?(f) }.join(" ")' > "$RUBOCOP_TARGET_FILES"
# Skip running RuboCop if there's no target files
if [ -s "${RUBOCOP_TARGET_FILES}" ]; then
- run_timed_command "bundle exec rubocop --parallel --force-exclusion $(cat ${RUBOCOP_TARGET_FILES})"
+ run_timed_command "fail_on_warnings bundle exec rubocop --parallel --force-exclusion $(cat ${RUBOCOP_TARGET_FILES})"
else
echoinfo "Nothing interesting changed for RuboCop. Skipping."
fi
@@ -177,7 +177,7 @@ feature-flags-usage:
script:
# We need to disable the cache for this cop since it creates files under tmp/feature_flags/*.used,
# the cache would prevent these files from being created.
- - run_timed_command "bundle exec rubocop --only Gitlab/MarkUsedFeatureFlags --cache false"
+ - run_timed_command "fail_on_warnings bundle exec rubocop --only Gitlab/MarkUsedFeatureFlags --cache false"
artifacts:
expire_in: 31d
when: always
diff --git a/GITALY_SERVER_VERSION b/GITALY_SERVER_VERSION
index 3077eca8e9b..037c4b1eded 100644
--- a/GITALY_SERVER_VERSION
+++ b/GITALY_SERVER_VERSION
@@ -1 +1 @@
-0ec4abd0c0b834dd49144c0821e056baaf8fe025
+78b286e5254bf08ad4e0c95226a9c3e30e98f3d4
diff --git a/GITLAB_ELASTICSEARCH_INDEXER_VERSION b/GITLAB_ELASTICSEARCH_INDEXER_VERSION
index fd2a01863fd..ef538c28109 100644
--- a/GITLAB_ELASTICSEARCH_INDEXER_VERSION
+++ b/GITLAB_ELASTICSEARCH_INDEXER_VERSION
@@ -1 +1 @@
-3.1.0
+3.1.2
diff --git a/app/controllers/search_controller.rb b/app/controllers/search_controller.rb
index 66968b34380..52b91b9930d 100644
--- a/app/controllers/search_controller.rb
+++ b/app/controllers/search_controller.rb
@@ -47,7 +47,7 @@ class SearchController < ApplicationController
def show
@project = search_service.project
@group = search_service.group
- @search_service = Gitlab::View::Presenter::Factory.new(search_service, current_user: current_user).fabricate!
+ @search_service_presenter = Gitlab::View::Presenter::Factory.new(search_service, current_user: current_user).fabricate!
return unless search_term_valid?
@@ -56,14 +56,14 @@ class SearchController < ApplicationController
@search_term = params[:search]
@sort = params[:sort] || default_sort
- @search_level = @search_service.level
+ @search_level = @search_service_presenter.level
@search_type = search_type
@global_search_duration_s = Benchmark.realtime do
- @scope = @search_service.scope
- @search_results = @search_service.search_results
- @search_objects = @search_service.search_objects
- @search_highlight = @search_service.search_highlight
+ @scope = @search_service_presenter.scope
+ @search_results = @search_service_presenter.search_results
+ @search_objects = @search_service_presenter.search_objects
+ @search_highlight = @search_service_presenter.search_highlight
end
Gitlab::Metrics::GlobalSearchSlis.record_apdex(
diff --git a/app/graphql/resolvers/ci/runner_groups_resolver.rb b/app/graphql/resolvers/ci/runner_groups_resolver.rb
index 3360e820bd2..c1d9bcbb9bb 100644
--- a/app/graphql/resolvers/ci/runner_groups_resolver.rb
+++ b/app/graphql/resolvers/ci/runner_groups_resolver.rb
@@ -6,7 +6,7 @@ module Resolvers
include Gitlab::Graphql::Authorize::AuthorizeResource
include ResolvesGroups
- type Types::GroupConnection, null: true
+ type 'Types::GroupConnection', null: true
authorize :read_runner
authorizes_object!
diff --git a/app/graphql/types/ci/runner_type.rb b/app/graphql/types/ci/runner_type.rb
index 5d34906f7b8..53595668649 100644
--- a/app/graphql/types/ci/runner_type.rb
+++ b/app/graphql/types/ci/runner_type.rb
@@ -38,10 +38,9 @@ module Types
field :executor_name, GraphQL::Types::String, null: true,
description: 'Executor last advertised by the runner.',
method: :executor_name
- field :groups, 'Types::GroupConnection',
- null: true,
- resolver: ::Resolvers::Ci::RunnerGroupsResolver,
- description: 'Groups the runner is associated with. For group runners only.'
+ field :groups, null: true,
+ resolver: ::Resolvers::Ci::RunnerGroupsResolver,
+ description: 'Groups the runner is associated with. For group runners only.'
field :id, ::Types::GlobalIDType[::Ci::Runner], null: false,
description: 'ID of the runner.'
field :ip_address, GraphQL::Types::String, null: true,
diff --git a/app/services/bulk_imports/create_service.rb b/app/services/bulk_imports/create_service.rb
index 124b5964232..26d678cfe3c 100644
--- a/app/services/bulk_imports/create_service.rb
+++ b/app/services/bulk_imports/create_service.rb
@@ -43,7 +43,7 @@ module BulkImports
BulkImportWorker.perform_async(bulk_import.id)
ServiceResponse.success(payload: bulk_import)
- rescue ActiveRecord::RecordInvalid => e
+ rescue ActiveRecord::RecordInvalid, BulkImports::NetworkError => e
ServiceResponse.error(
message: e.message,
http_status: :unprocessable_entity
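
With the widened rescue above, a BulkImports::NetworkError raised while contacting the source instance (for example, a rejected access token) is returned as an error ServiceResponse instead of propagating. A minimal sketch of how that surfaces to a caller; the `current_user`, `params` and `credentials` values here are placeholders, not part of this change:

    # Illustrative only: the rescued BulkImports::NetworkError becomes an
    # error ServiceResponse with an :unprocessable_entity HTTP status.
    result = BulkImports::CreateService.new(current_user, params, credentials).execute

    if result.error?
      # For an invalid token this is e.g. "401 Unauthorized".
      Rails.logger.warn(result.message)
    end
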
diff --git a/app/views/search/_category.html.haml b/app/views/search/_category.html.haml
index 3e483fe8cd2..74a5d5fb425 100644
--- a/app/views/search/_category.html.haml
+++ b/app/views/search/_category.html.haml
@@ -23,7 +23,7 @@
= search_filter_link 'milestones', _("Milestones")
= users
- - elsif @search_service.show_snippets?
+ - elsif @search_service_presenter.show_snippets?
= search_filter_link 'snippet_titles', _("Titles and Descriptions"), search: { snippets: true, group_id: nil, project_id: nil }
- else
= search_filter_link 'projects', _("Projects"), data: { qa_selector: 'projects_tab' }
diff --git a/app/views/search/_results.html.haml b/app/views/search/_results.html.haml
index 027ae6bf77c..4fc9d6b06d6 100644
--- a/app/views/search/_results.html.haml
+++ b/app/views/search/_results.html.haml
@@ -5,10 +5,10 @@
.results.gl-md-display-flex.gl-mt-0
#js-search-sidebar{ class: search_bar_classes, data: { navigation: search_navigation_json } }
.gl-w-full.gl-flex-grow-1.gl-overflow-x-hidden
- = render partial: 'search/results_status', locals: { search_service: @search_service } unless @search_objects.to_a.empty?
+ = render partial: 'search/results_status' unless @search_objects.to_a.empty?
= render partial: 'search/results_list'
- else
- = render partial: 'search/results_status', locals: { search_service: @search_service } unless @search_objects.to_a.empty?
+ = render partial: 'search/results_status' unless @search_objects.to_a.empty?
.results.gl-md-display-flex.gl-mt-3
- if %w[issues merge_requests].include?(@scope)
diff --git a/app/views/search/_results_status.html.haml b/app/views/search/_results_status.html.haml
index adea6b598f7..3cd100db8b7 100644
--- a/app/views/search/_results_status.html.haml
+++ b/app/views/search/_results_status.html.haml
@@ -1,8 +1,6 @@
-- search_service = local_assigns.fetch(:search_service)
-
-- return unless search_service.show_results_status?
+- return unless @search_service_presenter.show_results_status?
- if Feature.enabled?(:search_page_vertical_nav, current_user)
- = render partial: 'search/results_status_vert_nav', locals: { search_service: search_service }
+ = render partial: 'search/results_status_vert_nav'
- else
- = render partial: 'search/results_status_horiz_nav', locals: { search_service: search_service }
+ = render partial: 'search/results_status_horiz_nav'
diff --git a/app/views/search/_results_status_horiz_nav.html.haml b/app/views/search/_results_status_horiz_nav.html.haml
index fe6ee0f12ec..c0778b70c04 100644
--- a/app/views/search/_results_status_horiz_nav.html.haml
+++ b/app/views/search/_results_status_horiz_nav.html.haml
@@ -1,22 +1,22 @@
.search-results-status
.row-content-block.gl-display-flex
.gl-md-display-flex.gl-text-left.gl-align-items-center.gl-flex-grow-1
- - unless search_service.without_count?
- = search_entries_info(search_service.search_objects, search_service.scope, params[:search])
- - unless search_service.show_snippets?
- - if search_service.project
- - link_to_project = link_to(search_service.project.full_name, search_service.project, class: 'ml-md-1')
- - if search_service.scope == 'blobs'
+ - unless @search_service_presenter.without_count?
+ = search_entries_info(@search_objects, @scope, @search_term)
+ - unless @search_service_presenter.show_snippets?
+ - if @project
+ - link_to_project = link_to(@project.full_name, @project, class: 'ml-md-1')
+ - if @scope == 'blobs'
= _("in")
.mx-md-1
- #js-blob-ref-switcher{ data: { "project-id" => search_service.project.id, "ref" => repository_ref(search_service.project), "field-name": "repository_ref" } }
+ #js-blob-ref-switcher{ data: { "project-id" => @project.id, "ref" => repository_ref(@project), "field-name": "repository_ref" } }
= s_('SearchCodeResults|of %{link_to_project}').html_safe % { link_to_project: link_to_project }
- else
= _("in project %{link_to_project}").html_safe % { link_to_project: link_to_project }
- - elsif search_service.group
- - link_to_group = link_to(search_service.group.name, search_service.group, class: 'ml-md-1')
+ - elsif @group
+ - link_to_group = link_to(@group.name, @group, class: 'ml-md-1')
= _("in group %{link_to_group}").html_safe % { link_to_group: link_to_group }
- - if search_service.show_sort_dropdown?
+ - if @search_service_presenter.show_sort_dropdown?
.gl-md-display-flex.gl-flex-direction-column
#js-search-sort{ data: { "search-sort-options" => search_sort_options.to_json } }
diff --git a/app/views/search/_results_status_vert_nav.html.haml b/app/views/search/_results_status_vert_nav.html.haml
index 03916911f43..29cc0a20123 100644
--- a/app/views/search/_results_status_vert_nav.html.haml
+++ b/app/views/search/_results_status_vert_nav.html.haml
@@ -2,22 +2,22 @@
.gl-display-flex.gl-flex-direction-column
.gl-p-5.gl-display-flex
.gl-md-display-flex.gl-text-left.gl-align-items-center.gl-flex-grow-1
- - unless search_service.without_count?
- = search_entries_info(search_service.search_objects, search_service.scope, params[:search])
- - unless search_service.show_snippets?
- - if search_service.project
- - link_to_project = link_to(search_service.project.full_name, search_service.project, class: 'ml-md-1')
- - if search_service.scope == 'blobs'
+ - unless @search_service_presenter.without_count?
+ = search_entries_info(@search_objects, @scope, @search_term)
+ - unless @search_service_presenter.show_snippets?
+ - if @project
+ - link_to_project = link_to(@project.full_name, @project, class: 'ml-md-1')
+ - if @scope == 'blobs'
= _("in")
.mx-md-1
- #js-blob-ref-switcher{ data: { "project-id" => search_service.project.id, "ref" => repository_ref(search_service.project), "field-name": "repository_ref" } }
+ #js-blob-ref-switcher{ data: { "project-id" => @project.id, "ref" => repository_ref(@project), "field-name": "repository_ref" } }
= s_('SearchCodeResults|of %{link_to_project}').html_safe % { link_to_project: link_to_project }
- else
= _("in project %{link_to_project}").html_safe % { link_to_project: link_to_project }
- - elsif search_service.group
- - link_to_group = link_to(search_service.group.name, search_service.group, class: 'ml-md-1')
+ - elsif @group
+ - link_to_group = link_to(@group.name, @group, class: 'ml-md-1')
= _("in group %{link_to_group}").html_safe % { link_to_group: link_to_group }
- - if search_service.show_sort_dropdown?
+ - if @search_service_presenter.show_sort_dropdown?
.gl-md-display-flex.gl-flex-direction-column
#js-search-sort{ data: { "search-sort-options" => search_sort_options.to_json } }
%hr.gl-mb-5.gl-mt-0.gl-border-gray-100.gl-w-full
diff --git a/app/views/search/show.html.haml b/app/views/search/show.html.haml
index e1efa271d57..f8b1eb8bc64 100644
--- a/app/views/search/show.html.haml
+++ b/app/views/search/show.html.haml
@@ -9,7 +9,7 @@
- project_attributes = @project&.attributes&.slice('id', 'namespace_id', 'name')&.merge(name_with_namespace: @project&.name_with_namespace)
- if @search_results
- - if @search_service.without_count?
+ - if @search_service_presenter.without_count?
- page_description(_("%{scope} results for term '%{term}'") % { scope: @scope, term: @search_term })
- else
- page_description(_("%{count} %{scope} for term '%{term}'") % { count: @search_results.formatted_count(@scope), scope: @scope, term: @search_term })
diff --git a/config/feature_flags/development/send_traversal_ids_to_indexer.yml b/config/feature_flags/development/send_traversal_ids_to_indexer.yml
new file mode 100644
index 00000000000..65af6797391
--- /dev/null
+++ b/config/feature_flags/development/send_traversal_ids_to_indexer.yml
@@ -0,0 +1,8 @@
+---
+name: send_traversal_ids_to_indexer
+introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/107352
+rollout_issue_url: https://gitlab.com/gitlab-org/gitlab/-/issues/386465
+milestone: '15.8'
+type: development
+group: group::global search
+default_enabled: true
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 373ee742ba6..0e05044f2c1 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -11184,7 +11184,7 @@ CI/CD variables for a project.
| <a id="cirunnerdescription"></a>`description` | [`String`](#string) | Description of the runner. |
| <a id="cirunnereditadminurl"></a>`editAdminUrl` | [`String`](#string) | Admin form URL of the runner. Only available for administrators. |
| <a id="cirunnerexecutorname"></a>`executorName` | [`String`](#string) | Executor last advertised by the runner. |
-| <a id="cirunnergroups"></a>`groups` | [`GroupConnection`](#groupconnection) | Types::GroupConnection. (see [Connections](#connections)) |
+| <a id="cirunnergroups"></a>`groups` | [`GroupConnection`](#groupconnection) | Groups the runner is associated with. For group runners only. (see [Connections](#connections)) |
| <a id="cirunnerid"></a>`id` | [`CiRunnerID!`](#cirunnerid) | ID of the runner. |
| <a id="cirunneripaddress"></a>`ipAddress` | [`String`](#string) | IP address of the runner. |
| <a id="cirunnerjobcount"></a>`jobCount` | [`Int`](#int) | Number of jobs processed by the runner (limited to 1000, plus one to indicate that more items exist). |
diff --git a/lib/backup/database.rb b/lib/backup/database.rb
index cf19b4fa8ff..c82d84a6b5d 100644
--- a/lib/backup/database.rb
+++ b/lib/backup/database.rb
@@ -6,7 +6,7 @@ module Backup
class Database < Task
extend ::Gitlab::Utils::Override
include Backup::Helper
- attr_reader :force, :config
+ attr_reader :force
IGNORED_ERRORS = [
# Ignore warnings
@@ -18,98 +18,108 @@ module Backup
].freeze
IGNORED_ERRORS_REGEXP = Regexp.union(IGNORED_ERRORS).freeze
- def initialize(database_name, progress, force:)
+ def initialize(progress, force:)
super(progress)
- @database_name = database_name
- @config = base_model.connection_db_config.configuration_hash
@force = force
end
override :dump
- def dump(db_file_name, backup_id)
- FileUtils.mkdir_p(File.dirname(db_file_name))
- FileUtils.rm_f(db_file_name)
- compress_rd, compress_wr = IO.pipe
- compress_pid = spawn(gzip_cmd, in: compress_rd, out: [db_file_name, 'w', 0600])
- compress_rd.close
-
- dump_pid =
- case config[:adapter]
- when "postgresql" then
- progress.print "Dumping PostgreSQL database #{database} ... "
- pg_env
- pgsql_args = ["--clean"] # Pass '--clean' to include 'DROP TABLE' statements in the DB dump.
- pgsql_args << '--if-exists'
-
- if Gitlab.config.backup.pg_schema
- pgsql_args << '-n'
- pgsql_args << Gitlab.config.backup.pg_schema
+ def dump(destination_dir, backup_id)
+ snapshot_ids = base_models_for_backup.each_with_object({}) do |(database_name, base_model), snapshot_ids|
+ base_model.connection.begin_transaction(isolation: :repeatable_read)
- Gitlab::Database::EXTRA_SCHEMAS.each do |schema|
- pgsql_args << '-n'
- pgsql_args << schema.to_s
- end
- end
+ snapshot_ids[database_name] =
+ base_model.connection.execute("SELECT pg_export_snapshot() as snapshot_id;").first['snapshot_id']
+ end
+
+ FileUtils.mkdir_p(destination_dir)
+
+ snapshot_ids.each do |database_name, snapshot_id|
+ base_model = base_models_for_backup[database_name]
+
+ config = base_model.connection_db_config.configuration_hash
+
+ db_file_name = file_name(destination_dir, database_name)
+ FileUtils.rm_f(db_file_name)
+
+ pg_database = config[:database]
- Process.spawn('pg_dump', *pgsql_args, database, out: compress_wr)
+ progress.print "Dumping PostgreSQL database #{pg_database} ... "
+ pg_env(config)
+ pgsql_args = ["--clean"] # Pass '--clean' to include 'DROP TABLE' statements in the DB dump.
+ pgsql_args << '--if-exists'
+ pgsql_args << "--snapshot=#{snapshot_ids[database_name]}"
+
+ if Gitlab.config.backup.pg_schema
+ pgsql_args << '-n'
+ pgsql_args << Gitlab.config.backup.pg_schema
+
+ Gitlab::Database::EXTRA_SCHEMAS.each do |schema|
+ pgsql_args << '-n'
+ pgsql_args << schema.to_s
+ end
end
- compress_wr.close
- success = [compress_pid, dump_pid].all? do |pid|
- Process.waitpid(pid)
- $?.success?
- end
+ success = Backup::Dump::Postgres.new.dump(pg_database, db_file_name, pgsql_args)
+
+ base_model.connection.rollback_transaction
- report_success(success)
- progress.flush
+ raise DatabaseBackupError.new(config, db_file_name) unless success
- raise DatabaseBackupError.new(config, db_file_name) unless success
+ report_success(success)
+ progress.flush
+ end
end
override :restore
- def restore(db_file_name)
- unless File.exist?(db_file_name)
- raise(Backup::Error, "Source database file does not exist #{db_file_name}") if main_database?
+ def restore(destination_dir)
+ base_models_for_backup.each do |database_name, base_model|
+ config = base_model.connection_db_config.configuration_hash
- progress.puts "Source backup for the database #{@database_name} doesn't exist. Skipping the task"
- return
- end
+ db_file_name = file_name(destination_dir, database_name)
+ database = config[:database]
- unless force
- progress.puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
- sleep(5)
- end
+ unless File.exist?(db_file_name)
+ raise(Backup::Error, "Source database file does not exist #{db_file_name}") if main_database?(database_name)
- # Drop all tables Load the schema to ensure we don't have any newer tables
- # hanging out from a failed upgrade
- puts_time 'Cleaning the database ... '.color(:blue)
- Rake::Task['gitlab:db:drop_tables'].invoke
- puts_time 'done'.color(:green)
-
- decompress_rd, decompress_wr = IO.pipe
- decompress_pid = spawn(*%w(gzip -cd), out: decompress_wr, in: db_file_name)
- decompress_wr.close
-
- status, @errors =
- case config[:adapter]
- when "postgresql" then
- progress.print "Restoring PostgreSQL database #{database} ... "
- pg_env
- execute_and_track_errors(pg_restore_cmd, decompress_rd)
+ progress.puts "Source backup for the database #{@database_name} doesn't exist. Skipping the task"
+ return false
end
- decompress_rd.close
- Process.waitpid(decompress_pid)
- success = $?.success? && status.success?
+ unless force
+ progress.puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
+ sleep(5)
+ end
- if @errors.present?
- progress.print "------ BEGIN ERRORS -----\n".color(:yellow)
- progress.print @errors.join.color(:yellow)
- progress.print "------ END ERRORS -------\n".color(:yellow)
- end
+ # Drop all tables Load the schema to ensure we don't have any newer tables
+ # hanging out from a failed upgrade
+ drop_tables(database_name)
+
+ decompress_rd, decompress_wr = IO.pipe
+ decompress_pid = spawn(*%w(gzip -cd), out: decompress_wr, in: db_file_name)
+ decompress_wr.close
+
+ status, @errors =
+ case config[:adapter]
+ when "postgresql" then
+ progress.print "Restoring PostgreSQL database #{database} ... "
+ pg_env(config)
+ execute_and_track_errors(pg_restore_cmd(database), decompress_rd)
+ end
+ decompress_rd.close
+
+ Process.waitpid(decompress_pid)
+ success = $?.success? && status.success?
- report_success(success)
- raise Backup::Error, 'Restore failed' unless success
+ if @errors.present?
+ progress.print "------ BEGIN ERRORS -----\n".color(:yellow)
+ progress.print @errors.join.color(:yellow)
+ progress.print "------ END ERRORS -------\n".color(:yellow)
+ end
+
+ report_success(success)
+ raise Backup::Error, 'Restore failed' unless success
+ end
end
override :pre_restore_warning
@@ -144,16 +154,22 @@ module Backup
protected
- def database
- @config[:database]
+ def base_models_for_backup
+ @base_models_for_backup ||= Gitlab::Database.database_base_models
end
- def base_model
- Gitlab::Database.database_base_models[@database_name]
+ def main_database?(database_name)
+ database_name.to_sym == :main
end
- def main_database?
- @database_name == :main
+ def file_name(base_dir, database_name)
+ prefix = if database_name.to_sym != :main
+ "#{database_name}_"
+ else
+ ''
+ end
+
+ File.join(base_dir, "#{prefix}database.sql.gz")
end
def ignore_error?(line)
@@ -189,7 +205,7 @@ module Backup
end
end
- def pg_env
+ def pg_env(config)
args = {
username: 'PGUSER',
host: 'PGHOST',
@@ -223,7 +239,20 @@ module Backup
private
- def pg_restore_cmd
+ def drop_tables(database_name)
+ if Rake::Task.task_defined? "gitlab:db:drop_tables:#{database_name}"
+ puts_time 'Cleaning the database ... '.color(:blue)
+ Rake::Task["gitlab:db:drop_tables:#{database_name}"].invoke
+ puts_time 'done'.color(:green)
+ elsif !Gitlab::Database.has_config?(:ci)
+ # In single database, we do not have rake tasks per database
+ puts_time 'Cleaning the database ... '.color(:blue)
+ Rake::Task["gitlab:db:drop_tables"].invoke
+ puts_time 'done'.color(:green)
+ end
+ end
+
+ def pg_restore_cmd(database)
['psql', database]
end
end
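
Taken as a whole, the reworked Backup::Database#dump above replaces the old single-database, single-file dump with a two-phase, snapshot-based dump across every configured database. A condensed sketch of that flow, using the identifiers from the diff (progress output and the optional pg_schema arguments are omitted; this is not a drop-in implementation):

    # Phase 1: open a REPEATABLE READ transaction per configured database and
    # export a snapshot id, so the dumps taken in phase 2 are mutually consistent.
    snapshot_ids = base_models_for_backup.each_with_object({}) do |(name, model), ids|
      model.connection.begin_transaction(isolation: :repeatable_read)
      ids[name] = model.connection
        .execute("SELECT pg_export_snapshot() as snapshot_id;")
        .first['snapshot_id']
    end

    # Phase 2: dump each database against its exported snapshot, then roll back.
    snapshot_ids.each do |name, snapshot_id|
      model  = base_models_for_backup[name]
      config = model.connection_db_config.configuration_hash
      args   = ['--clean', '--if-exists', "--snapshot=#{snapshot_id}"]

      db_file = file_name(destination_dir, name) # database.sql.gz or <name>_database.sql.gz
      success = Backup::Dump::Postgres.new.dump(config[:database], db_file, args)
      model.connection.rollback_transaction

      raise DatabaseBackupError.new(config, db_file) unless success
    end
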
diff --git a/lib/backup/dump/postgres.rb b/lib/backup/dump/postgres.rb
new file mode 100644
index 00000000000..c07e2c2928a
--- /dev/null
+++ b/lib/backup/dump/postgres.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+module Backup
+ module Dump
+ class Postgres
+ include Backup::Helper
+
+ def dump(database_name, output_file, pgsql_args)
+ compress_rd, compress_wr = IO.pipe
+ compress_pid = spawn(gzip_cmd, in: compress_rd, out: [output_file, 'w', 0o600])
+ compress_rd.close
+
+ dump_pid = Process.spawn('pg_dump', *pgsql_args, database_name, out: compress_wr)
+ compress_wr.close
+
+ [compress_pid, dump_pid].all? do |pid|
+ Process.waitpid(pid)
+ $?.success?
+ end
+ end
+ end
+ end
+end
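
Because the gzip/pg_dump pipeline now lives in its own class, it can be exercised in isolation. An illustrative call with placeholder database name and output path; `dump` returns true only when both the gzip and pg_dump processes exit successfully:

    # Placeholders: adjust the database name, output path and pg_dump arguments.
    dumper = Backup::Dump::Postgres.new
    ok = dumper.dump('gitlabhq_production',
                     '/tmp/backups/database.sql.gz',
                     ['--clean', '--if-exists'])
    raise 'pg_dump failed' unless ok
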
diff --git a/lib/backup/manager.rb b/lib/backup/manager.rb
index f8424f6250e..a7dddcf8619 100644
--- a/lib/backup/manager.rb
+++ b/lib/backup/manager.rb
@@ -22,7 +22,6 @@ module Backup
:destination_optional, # `true` if the destination might not exist on a successful backup.
:cleanup_path, # Path to remove after a successful backup. Uses `destination_path` when not specified.
:task,
- :task_group,
keyword_init: true
) do
def enabled?
@@ -121,20 +120,11 @@ module Backup
def build_definitions # rubocop:disable Metrics/AbcSize
{
- 'main_db' => TaskDefinition.new(
- human_name: _('main_database'),
- destination_path: 'db/database.sql.gz',
+ 'db' => TaskDefinition.new(
+ human_name: _('database'),
+ destination_path: 'db',
cleanup_path: 'db',
- task: build_db_task(:main),
- task_group: 'db'
- ),
- 'ci_db' => TaskDefinition.new(
- human_name: _('ci_database'),
- destination_path: 'db/ci_database.sql.gz',
- cleanup_path: 'db',
- task: build_db_task(:ci),
- enabled: Gitlab::Database.has_config?(:ci),
- task_group: 'db'
+ task: build_db_task
),
'repositories' => TaskDefinition.new(
human_name: _('repositories'),
@@ -186,11 +176,10 @@ module Backup
}.freeze
end
- def build_db_task(database_name)
- return unless Gitlab::Database.has_config?(database_name) # It will be disabled for a single db setup
-
+ def build_db_task
force = Gitlab::Utils.to_boolean(ENV['force'], default: false)
- Database.new(database_name, progress, force: force)
+
+ Database.new(progress, force: force)
end
def build_repositories_task
@@ -483,7 +472,7 @@ module Backup
end
def skipped?(item)
- skipped.include?(item) || skipped.include?(definitions[item]&.task_group)
+ skipped.include?(item)
end
def skipped
diff --git a/lib/tasks/gitlab/backup.rake b/lib/tasks/gitlab/backup.rake
index 6647a10898f..787df37a8f8 100644
--- a/lib/tasks/gitlab/backup.rake
+++ b/lib/tasks/gitlab/backup.rake
@@ -44,15 +44,13 @@ namespace :gitlab do
namespace :db do
task create: :gitlab_environment do
lock do
- Backup::Manager.new(progress).run_create_task('main_db')
- Backup::Manager.new(progress).run_create_task('ci_db')
+ Backup::Manager.new(progress).run_create_task('db')
end
end
task restore: :gitlab_environment do
lock do
- Backup::Manager.new(progress).run_restore_task('main_db')
- Backup::Manager.new(progress).run_restore_task('ci_db')
+ Backup::Manager.new(progress).run_restore_task('db')
end
end
end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index cd3049ddfbc..fe528918fe6 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -49119,9 +49119,6 @@ msgstr ""
msgid "ciReport|is loading, errors when loading results"
msgstr ""
-msgid "ci_database"
-msgstr ""
-
msgid "closed"
msgstr ""
@@ -49205,6 +49202,9 @@ msgstr ""
msgid "data"
msgstr ""
+msgid "database"
+msgstr ""
+
msgid "date must not be after 9999-12-31"
msgstr ""
@@ -49645,9 +49645,6 @@ msgstr ""
msgid "locked by %{path_lock_user_name} %{created_at}"
msgstr ""
-msgid "main_database"
-msgstr ""
-
msgid "manual"
msgstr ""
diff --git a/scripts/allowed_warnings.txt b/scripts/allowed_warnings.txt
new file mode 100644
index 00000000000..8162f45f760
--- /dev/null
+++ b/scripts/allowed_warnings.txt
@@ -0,0 +1,11 @@
+# List of ignored warnings used by `fail_on_warnings` in `scripts/utils.sh`.
+# Each line represents a match used by `grep --invert-match --file`.
+# Comments and empty lines are ignored.
+
+# https://github.com/browserslist/browserslist/blob/d0ec62eb48c41c218478cd3ac28684df051cc865/node.js#L329
+# warns if caniuse-lite package is older than 6 months. Ignore this
+# warning message so that GitLab backports don't fail.
+Browserslist: caniuse-lite is outdated. Please run next command `yarn upgrade`
+
+# https://github.com/mime-types/mime-types-data/pull/50#issuecomment-1060908930
+Type application/netcdf is already registered as a variant of application/netcdf.
diff --git a/scripts/static-analysis b/scripts/static-analysis
index 9a0057d8f4d..0d03dd42c73 100755
--- a/scripts/static-analysis
+++ b/scripts/static-analysis
@@ -7,14 +7,7 @@ require_relative '../lib/gitlab/popen'
require_relative '../lib/gitlab/popen/runner'
class StaticAnalysis
- ALLOWED_WARNINGS = [
- # https://github.com/browserslist/browserslist/blob/d0ec62eb48c41c218478cd3ac28684df051cc865/node.js#L329
- # warns if caniuse-lite package is older than 6 months. Ignore this
- # warning message so that GitLab backports don't fail.
- "Browserslist: caniuse-lite is outdated. Please run next command `yarn upgrade`",
- # https://github.com/mime-types/mime-types-data/pull/50#issuecomment-1060908930
- "Type application/netcdf is already registered as a variant of application/netcdf."
- ].freeze
+ # `ALLOWED_WARNINGS` moved to scripts/allowed_warnings.txt
Task = Struct.new(:command, :duration) do
def cmd
@@ -94,12 +87,12 @@ class StaticAnalysis
if static_analysis.all_success_and_clean?
puts 'All static analyses passed successfully.'
elsif static_analysis.all_success?
- puts 'All static analyses passed successfully, but we have warnings:'
+ puts 'All static analyses passed successfully with warnings.'
puts
emit_warnings(static_analysis)
- exit 2 if warning_count(static_analysis).nonzero?
+ # We used to exit 2 on warnings but `fail_on_warnings` takes care of it now.
else
puts 'Some static analyses failed:'
@@ -112,11 +105,11 @@ class StaticAnalysis
def emit_warnings(static_analysis)
static_analysis.warned_results.each do |result|
- puts
- puts "**** #{result.cmd.join(' ')} had the following warning(s):"
- puts
- puts result.stderr
- puts
+ warn
+ warn "**** #{result.cmd.join(' ')} had the following warning(s):"
+ warn
+ warn result.stderr
+ warn
end
end
@@ -131,11 +124,6 @@ class StaticAnalysis
end
end
- def warning_count(static_analysis)
- static_analysis.warned_results
- .count { |result| !ALLOWED_WARNINGS.include?(result.stderr.strip) } # rubocop:disable Rails/NegateInclude
- end
-
def tasks_to_run(node_total)
total_time = TASKS_WITH_DURATIONS_SECONDS.sum(&:duration).to_f
ideal_time_per_node = total_time / node_total
diff --git a/scripts/utils.sh b/scripts/utils.sh
index c9e4a6a487d..2bbe7e10de8 100644
--- a/scripts/utils.sh
+++ b/scripts/utils.sh
@@ -107,6 +107,37 @@ function install_junit_merge_gem() {
run_timed_command "gem install junit_merge --no-document --version 0.1.2"
}
+function fail_on_warnings() {
+ local cmd="$*"
+ local warning_file
+ warning_file="$(mktemp)"
+
+ local allowed_warning_file
+ allowed_warning_file="$(mktemp)"
+
+ eval "$cmd 2>$warning_file"
+ local ret=$?
+
+ # Filter out comments and empty lines from allowed warnings file.
+ grep --invert-match --extended-regexp "^#|^$" scripts/allowed_warnings.txt > "$allowed_warning_file"
+
+ local warnings
+ # Filter out allowed warnings from stderr.
+ # Turn grep errors into warnings so we fail later.
+ warnings=$(grep --invert-match --file "$allowed_warning_file" "$warning_file" 2>&1 || true)
+
+ rm -f "$warning_file" "$allowed_warning_file"
+
+ if [ "$warnings" != "" ]
+ then
+ echoerr "There were warnings:"
+ echoerr "$warnings"
+ return 1
+ fi
+
+ return $ret
+}
+
function run_timed_command() {
local cmd="${1}"
local metric_name="${2:-no}"
diff --git a/spec/fixtures/database.sql.gz b/spec/fixtures/database.sql.gz
new file mode 100644
index 00000000000..a98aa7c53f2
--- /dev/null
+++ b/spec/fixtures/database.sql.gz
Binary files differ
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/database_spec.rb
index ed5b34b7f8c..c4e8b903aa0 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/database_spec.rb
@@ -2,11 +2,13 @@
require 'spec_helper'
-RSpec.describe Backup::Database do
+RSpec.describe Backup::Database, feature_category: :backup_restore do
let(:progress) { StringIO.new }
let(:output) { progress.string }
+ let(:single_database) { !Gitlab::Database.has_config?(:ci) }
before(:all) do
+ Rake::Task.define_task(:environment)
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/backup'
Rake.application.rake_require 'tasks/gitlab/shell'
@@ -14,14 +16,110 @@ RSpec.describe Backup::Database do
Rake.application.rake_require 'tasks/cache'
end
+ describe '#dump', :delete do
+ let(:backup_id) { 'some_id' }
+ let(:force) { true }
+
+ subject { described_class.new(progress, force: force) }
+
+ before do
+ Gitlab::Database.database_base_models.each do |database_name, base_model|
+ base_model.connection.rollback_transaction unless base_model.connection.open_transactions.zero?
+ end
+ end
+
+ it 'creates gzipped database dumps' do
+ Dir.mktmpdir do |dir|
+ subject.dump(dir, backup_id)
+
+ Gitlab::Database.database_base_models.each_key do |database_name|
+ filename = database_name == 'main' ? 'database.sql.gz' : "#{database_name}_database.sql.gz"
+ expect(File.exist?(File.join(dir, filename))).to eq(true)
+ end
+ end
+ end
+
+ it 'uses snapshots' do
+ Dir.mktmpdir do |dir|
+ base_model = Gitlab::Database.database_base_models['main']
+ expect(base_model.connection).to receive(:begin_transaction).with(
+ isolation: :repeatable_read
+ ).and_call_original
+ expect(base_model.connection).to receive(:execute).with(
+ "SELECT pg_export_snapshot() as snapshot_id;"
+ ).and_call_original
+ expect(base_model.connection).to receive(:rollback_transaction).and_call_original
+
+ subject.dump(dir, backup_id)
+ end
+ end
+
+ describe 'pg_dump arguments' do
+ let(:snapshot_id) { 'fake_id' }
+ let(:pg_args) do
+ [
+ '--clean',
+ '--if-exists',
+ "--snapshot=#{snapshot_id}"
+ ]
+ end
+
+ let(:dumper) { double }
+ let(:destination_dir) { 'tmp' }
+
+ before do
+ allow(Backup::Dump::Postgres).to receive(:new).and_return(dumper)
+ allow(dumper).to receive(:dump).with(any_args).and_return(true)
+
+ Gitlab::Database.database_base_models.each do |database_name, base_model|
+ allow(base_model.connection).to receive(:execute).with(
+ "SELECT pg_export_snapshot() as snapshot_id;"
+ ).and_return(['snapshot_id' => snapshot_id])
+ end
+ end
+
+ it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do
+ expect(dumper).to receive(:dump).with(anything, anything, pg_args)
+
+ subject.dump(destination_dir, backup_id)
+ end
+
+ context 'when a PostgreSQL schema is used' do
+ let(:schema) { 'gitlab' }
+ let(:additional_args) do
+ pg_args + ['-n', schema] + Gitlab::Database::EXTRA_SCHEMAS.map do |schema|
+ ['-n', schema.to_s]
+ end.flatten
+ end
+
+ before do
+ allow(Gitlab.config.backup).to receive(:pg_schema).and_return(schema)
+ end
+
+ it 'calls Backup::Dump::Postgres with correct pg_dump arguments' do
+ expect(dumper).to receive(:dump).with(anything, anything, additional_args)
+
+ subject.dump(destination_dir, backup_id)
+ end
+ end
+ end
+ end
+
describe '#restore' do
let(:cmd) { %W[#{Gem.ruby} -e $stdout.puts(1)] }
- let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
+ let(:backup_dir) { Rails.root.join("spec/fixtures/") }
let(:force) { true }
- subject { described_class.new(Gitlab::Database::MAIN_DATABASE_NAME.to_sym, progress, force: force) }
+ subject { described_class.new(progress, force: force) }
before do
+ if single_database
+ allow(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+ else
+ allow(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
+ allow(Rake::Task['gitlab:db:drop_tables:ci']).to receive(:invoke)
+ end
+
allow(subject).to receive(:pg_restore_cmd).and_return(cmd)
end
@@ -30,9 +128,14 @@ RSpec.describe Backup::Database do
it 'warns the user and waits' do
expect(subject).to receive(:sleep)
- expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
- subject.restore(data)
+ if single_database
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+ else
+ expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
+ end
+
+ subject.restore(backup_dir)
expect(output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
end
@@ -43,12 +146,14 @@ RSpec.describe Backup::Database do
end
context 'with an empty .gz file' do
- let(:data) { Rails.root.join("spec/fixtures/pages_empty.tar.gz").to_s }
-
it 'returns successfully' do
- expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+ if single_database
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+ else
+ expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
+ end
- subject.restore(data)
+ subject.restore(backup_dir)
expect(output).to include("Restoring PostgreSQL database")
expect(output).to include("[DONE]")
@@ -57,12 +162,18 @@ RSpec.describe Backup::Database do
end
context 'with a corrupted .gz file' do
- let(:data) { Rails.root.join("spec/fixtures/big-image.png").to_s }
+ before do
+ allow(subject).to receive(:file_name).and_return("#{backup_dir}big-image.png")
+ end
it 'raises a backup error' do
- expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+ if single_database
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+ else
+ expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
+ end
- expect { subject.restore(data) }.to raise_error(Backup::Error)
+ expect { subject.restore(backup_dir) }.to raise_error(Backup::Error)
end
end
@@ -72,9 +183,13 @@ RSpec.describe Backup::Database do
let(:cmd) { %W[#{Gem.ruby} -e $stderr.write("#{noise}#{visible_error}")] }
it 'filters out noise from errors and has a post restore warning' do
- expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+ if single_database
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+ else
+ expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
+ end
- subject.restore(data)
+ subject.restore(backup_dir)
expect(output).to include("ERRORS")
expect(output).not_to include(noise)
@@ -95,9 +210,13 @@ RSpec.describe Backup::Database do
end
it 'overrides default config values' do
- expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+ if single_database
+ expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
+ else
+ expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
+ end
- subject.restore(data)
+ subject.restore(backup_dir)
expect(output).to include(%("PGHOST"=>"test.example.com"))
expect(output).to include(%("PGPASSWORD"=>"donotchange"))
@@ -107,22 +226,30 @@ RSpec.describe Backup::Database do
end
context 'when the source file is missing' do
- let(:main_database) { described_class.new(Gitlab::Database::MAIN_DATABASE_NAME.to_sym, progress, force: force) }
- let(:ci_database) { described_class.new(Gitlab::Database::CI_DATABASE_NAME.to_sym, progress, force: force) }
- let(:missing_file) { Rails.root.join("spec/fixtures/missing_file.tar.gz").to_s }
+ context 'for main database' do
+ before do
+ allow(File).to receive(:exist?).and_call_original
+ allow(File).to receive(:exist?).with("#{backup_dir}database.sql.gz").and_return(false)
+ allow(File).to receive(:exist?).with("#{backup_dir}ci_database.sql.gz").and_return(false)
+ end
- it 'main database raises an error about missing source file' do
- expect(Rake::Task['gitlab:db:drop_tables']).not_to receive(:invoke)
+ it 'raises an error about missing source file' do
+ if single_database
+ expect(Rake::Task['gitlab:db:drop_tables']).not_to receive(:invoke)
+ else
+ expect(Rake::Task['gitlab:db:drop_tables:main']).not_to receive(:invoke)
+ end
- expect do
- main_database.restore(missing_file)
- end.to raise_error(Backup::Error, /Source database file does not exist/)
+ expect do
+ subject.restore('db')
+ end.to raise_error(Backup::Error, /Source database file does not exist/)
+ end
end
- it 'ci database tolerates missing source file' do
- expect(Rake::Task['gitlab:db:drop_tables']).not_to receive(:invoke)
- skip_if_multiple_databases_not_setup
- expect { ci_database.restore(missing_file) }.not_to raise_error
+ context 'for ci database' do
+ it 'ci database tolerates missing source file' do
+ expect { subject.restore(backup_dir) }.not_to raise_error
+ end
end
end
end
diff --git a/spec/lib/backup/dump/postgres_spec.rb b/spec/lib/backup/dump/postgres_spec.rb
new file mode 100644
index 00000000000..f6a68ab6db9
--- /dev/null
+++ b/spec/lib/backup/dump/postgres_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::Dump::Postgres, feature_category: :backup_restore do
+ describe '#dump' do
+ let(:pg_database) { 'gitlabhq_test' }
+ let(:destination_dir) { Dir.mktmpdir }
+ let(:db_file_name) { File.join(destination_dir, 'output.gz') }
+
+ let(:pipes) { IO.pipe }
+ let(:gzip_pid) { spawn('gzip -c -1', in: pipes[0], out: [db_file_name, 'w', 0o600]) }
+ let(:pg_dump_pid) { Process.spawn('pg_dump', *args, pg_database, out: pipes[1]) }
+ let(:args) { ['--help'] }
+
+ subject { described_class.new }
+
+ before do
+ allow(IO).to receive(:pipe).and_return(pipes)
+ end
+
+ after do
+ FileUtils.remove_entry destination_dir
+ end
+
+ it 'creates gzipped dump using supplied arguments' do
+ expect(subject).to receive(:spawn).with('gzip -c -1', in: pipes.first,
+ out: [db_file_name, 'w', 0o600]).and_return(gzip_pid)
+ expect(Process).to receive(:spawn).with('pg_dump', *args, pg_database, out: pipes[1]).and_return(pg_dump_pid)
+
+ subject.dump(pg_database, db_file_name, args)
+
+ expect(File.exist?(db_file_name)).to eq(true)
+ end
+ end
+end
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 992dbec73c2..02889c1535d 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Backup::Manager do
+RSpec.describe Backup::Manager, feature_category: :backup_restore do
include StubENV
let(:progress) { StringIO.new }
@@ -30,8 +30,7 @@ RSpec.describe Backup::Manager do
task: task,
enabled: enabled,
destination_path: 'my_task.tar.gz',
- human_name: 'my task',
- task_group: 'group1'
+ human_name: 'my task'
)
}
end
@@ -63,16 +62,6 @@ RSpec.describe Backup::Manager do
subject.run_create_task('my_task')
end
end
-
- describe 'task group skipped' do
- it 'informs the user' do
- stub_env('SKIP', 'group1')
-
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping my task ... [SKIPPED]')
-
- subject.run_create_task('my_task')
- end
- end
end
describe '#run_restore_task' do
diff --git a/spec/models/concerns/triggerable_hooks_spec.rb b/spec/models/concerns/triggerable_hooks_spec.rb
index b702454f432..5682a189c41 100644
--- a/spec/models/concerns/triggerable_hooks_spec.rb
+++ b/spec/models/concerns/triggerable_hooks_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe TriggerableHooks do
stub_const('TestableHook', Class.new(WebHook))
TestableHook.class_eval do
- include TriggerableHooks # rubocop:disable Rspec/DescribedClass
+ include TriggerableHooks # rubocop:disable RSpec/DescribedClass
triggerable_hooks [:push_hooks]
end
end
diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index f1e5533139e..f8dca03df15 100644
--- a/spec/services/bulk_imports/create_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::CreateService do
+RSpec.describe BulkImports::CreateService, feature_category: :importers do
let(:user) { create(:user) }
let(:credentials) { { url: 'http://gitlab.example', access_token: 'token' } }
let(:destination_group) { create(:group, path: 'destination1') }
@@ -102,6 +102,22 @@ RSpec.describe BulkImports::CreateService do
expect(result.message).to eq("Validation failed: Source full path can't be blank")
end
+ context 'when the token is invalid' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:instance_version).and_raise(BulkImports::NetworkError, "401 Unauthorized")
+ end
+ end
+
+ it 'rescues the error and raises a ServiceResponse::Error' do
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ expect(result.message).to eq("401 Unauthorized")
+ end
+ end
+
describe '#user-role' do
context 'when there is a parent_namespace and the user is a member' do
let(:group2) { create(:group, path: 'destination200', source_id: parent_group.id ) }
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index dc74f25db87..4aa6edf4789 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -2,14 +2,12 @@
require 'rake_helper'
-RSpec.describe 'gitlab:app namespace rake task', :delete do
+RSpec.describe 'gitlab:app namespace rake task', :delete, feature_category: :backup_restore do
let(:enable_registry) { true }
let(:backup_restore_pid_path) { "#{Rails.application.root}/tmp/backup_restore.pid" }
let(:backup_tasks) { %w[db repo uploads builds artifacts pages lfs terraform_state registry packages] }
let(:backup_types) do
- %w[main_db repositories uploads builds artifacts pages lfs terraform_state registry packages].tap do |array|
- array.insert(1, 'ci_db') if Gitlab::Database.has_config?(:ci)
- end
+ %w[db repositories uploads builds artifacts pages lfs terraform_state registry packages]
end
def tars_glob
@@ -94,7 +92,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
let(:pid_file) { instance_double(File, write: 12345) }
where(:tasks_name, :rake_task) do
- %w[main_db ci_db] | 'gitlab:backup:db:restore'
+ 'db' | 'gitlab:backup:db:restore'
'repositories' | 'gitlab:backup:repo:restore'
'builds' | 'gitlab:backup:builds:restore'
'uploads' | 'gitlab:backup:uploads:restore'
@@ -260,9 +258,7 @@ RSpec.describe 'gitlab:app namespace rake task', :delete do
end
it 'logs the progress to log file' do
- ci_database_status = Gitlab::Database.has_config?(:ci) ? "[SKIPPED]" : "[DISABLED]"
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping main_database ... [SKIPPED]")
- expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping ci_database ... #{ci_database_status}")
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping database ... [SKIPPED]")
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping repositories ... ")
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping repositories ... done")
expect(Gitlab::BackupLogger).to receive(:info).with(message: "Dumping uploads ... ")
diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb
index e81462ee518..de994a0da2b 100644
--- a/spec/views/search/_results.html.haml_spec.rb
+++ b/spec/views/search/_results.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'search/_results' do
+RSpec.describe 'search/_results', feature_category: :global_search do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
@@ -11,7 +11,6 @@ RSpec.describe 'search/_results' do
let(:scope) { 'issues' }
let(:term) { 'foo' }
let(:search_results) { instance_double('Gitlab::SearchResults', { formatted_count: 10, current_user: user } ) }
- let(:search_service) { class_double(SearchServicePresenter, scope: scope, search: term, current_user: user) }
before do
controller.params[:action] = 'show'
@@ -20,6 +19,7 @@ RSpec.describe 'search/_results' do
create_list(:issue, 3)
allow(view).to receive(:current_user) { user }
+
assign(:search_count_path, 'test count link')
assign(:search_path, 'link test')
assign(:search_results, search_results)
@@ -27,8 +27,9 @@ RSpec.describe 'search/_results' do
assign(:search_term, term)
assign(:scope, scope)
- @search_service = SearchServicePresenter.new(SearchService.new(user, search: term, scope: scope))
- allow(@search_service).to receive(:search_objects).and_return(search_objects)
+ search_service_presenter = SearchServicePresenter.new(SearchService.new(user, search: term, scope: scope))
+ allow(search_service_presenter).to receive(:search_objects).and_return(search_objects)
+ assign(:search_service_presenter, search_service_presenter)
end
where(search_page_vertical_nav_enabled: [true, false])
diff --git a/spec/views/search/show.html.haml_spec.rb b/spec/views/search/show.html.haml_spec.rb
index 26ec2c6ae74..6adb2c77c4d 100644
--- a/spec/views/search/show.html.haml_spec.rb
+++ b/spec/views/search/show.html.haml_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'search/show', feature_category: :global_search do
stub_template "search/_category.html.haml" => 'Category Partial'
stub_template "search/_results.html.haml" => 'Results Partial'
- assign(:search_service, search_service_presenter)
+ assign(:search_service_presenter, search_service_presenter)
end
context 'search_page_vertical_nav feature flag enabled' do