gitlab.com/gitlab-org/gitlab-foss.git
author     GitLab Bot <gitlab-bot@gitlab.com>  2024-01-23 18:08:36 +0300
committer  GitLab Bot <gitlab-bot@gitlab.com>  2024-01-23 18:08:36 +0300
commit     118083ac69c8cba0bc60633a15b9bb44e5f78281 (patch)
tree       1e00c72fa30c952c9a8d86b3441132f037b3fc21
parent     84b507d17bad7636a02ae2e9f59e8eb219ad7e15 (diff)
Add latest changes from gitlab-org/gitlab@master
-rw-r--r--  .rubocop_todo/gitlab/doc_url.yml  1
-rw-r--r--  .rubocop_todo/layout/line_length.yml  3
-rw-r--r--  .rubocop_todo/layout/space_in_lambda_literal.yml  1
-rw-r--r--  .rubocop_todo/lint/unused_method_argument.yml  4
-rw-r--r--  .rubocop_todo/rails/file_path.yml  1
-rw-r--r--  .rubocop_todo/rspec/any_instance_of.yml  1
-rw-r--r--  .rubocop_todo/rspec/context_wording.yml  2
-rw-r--r--  .rubocop_todo/rspec/feature_category.yml  2
-rw-r--r--  .rubocop_todo/rspec/named_subject.yml  7
-rw-r--r--  .rubocop_todo/rspec/return_from_stub.yml  1
-rw-r--r--  .rubocop_todo/rspec/verified_doubles.yml  2
-rw-r--r--  .rubocop_todo/style/empty_method.yml  1
-rw-r--r--  .rubocop_todo/style/explicit_block_argument.yml  1
-rw-r--r--  .rubocop_todo/style/guard_clause.yml  1
-rw-r--r--  .rubocop_todo/style/inline_disable_annotation.yml  2
-rw-r--r--  .rubocop_todo/style/numeric_literal_prefix.yml  1
-rw-r--r--  .rubocop_todo/style/redundant_interpolation.yml  1
-rw-r--r--  .rubocop_todo/style/string_concatenation.yml  3
-rw-r--r--  .rubocop_todo/style/symbol_proc.yml  1
-rw-r--r--  app/assets/javascripts/members/components/avatars/user_avatar.vue  1
-rw-r--r--  app/assets/javascripts/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row.vue  9
-rw-r--r--  app/assets/javascripts/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql  1
-rw-r--r--  app/assets/javascripts/user_popovers.js  3
-rw-r--r--  app/assets/javascripts/vue_shared/components/user_popover/user_popover.vue  4
-rw-r--r--  app/assets/javascripts/work_items/components/work_item_assignees_with_edit.vue  10
-rw-r--r--  app/graphql/types/projects/branch_rule_type.rb  4
-rw-r--r--  app/models/commit.rb  4
-rw-r--r--  app/models/container_registry/protection/rule.rb  4
-rw-r--r--  app/models/merge_request.rb  33
-rw-r--r--  app/services/import/github_service.rb  26
-rw-r--r--  app/views/devise/shared/_footer.html.haml  2
-rw-r--r--  app/views/projects/merge_requests/_page.html.haml  4
-rw-r--r--  app/views/projects/merge_requests/widget/_commit_change_content.html.haml  4
-rw-r--r--  db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_artifact.yml  2
-rw-r--r--  db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_config.yml  2
-rw-r--r--  db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_metadata.yml  2
-rw-r--r--  db/post_migrate/20240122092018_drop_index_from_ci_build_trace_metadata.rb  17
-rw-r--r--  db/post_migrate/20240122092139_remove_fk_from_ci_build_trace_metadata_and_ci_job_artifacts.rb  36
-rw-r--r--  db/post_migrate/20240122132856_finalize_backfill_partition_id_ci_pipeline_metadata.rb  22
-rw-r--r--  db/post_migrate/20240122133127_finalize_backfill_partition_id_ci_pipeline_artifact.rb  22
-rw-r--r--  db/post_migrate/20240122133457_finalize_backfill_partition_id_ci_pipeline_config.rb  22
-rw-r--r--  db/schema_migrations/20240122092018  1
-rw-r--r--  db/schema_migrations/20240122092139  1
-rw-r--r--  db/schema_migrations/20240122132856  1
-rw-r--r--  db/schema_migrations/20240122133127  1
-rw-r--r--  db/schema_migrations/20240122133457  1
-rw-r--r--  db/structure.sql  5
-rw-r--r--  doc/administration/settings/jira_cloud_app.md  2
-rw-r--r--  doc/administration/settings/jira_cloud_app_troubleshooting.md  4
-rw-r--r--  doc/administration/settings/project_integration_management.md  2
-rw-r--r--  doc/administration/settings/slack_app.md  8
-rw-r--r--  doc/api/graphql/reference/index.md  6
-rw-r--r--  doc/ci/runners/saas/macos_saas_runner.md  4
-rw-r--r--  doc/development/sidekiq/worker_attributes.md  15
-rw-r--r--  doc/integration/jira/connect-app.md  6
-rw-r--r--  doc/user/group/epics/epic_boards.md  4
-rw-r--r--  doc/user/project/integrations/gitlab_slack_app_troubleshooting.md  4
-rw-r--r--  doc/user/project/integrations/gitlab_slack_application.md  4
-rw-r--r--  doc/user/project/integrations/index.md  2
-rw-r--r--  doc/user/project/issue_board.md  4
-rw-r--r--  doc/user/project/merge_requests/cherry_pick_changes.md  9
-rw-r--r--  doc/user/project/merge_requests/revert_changes.md  7
-rw-r--r--  lib/backup/database.rb  277
-rw-r--r--  lib/backup/files.rb  163
-rw-r--r--  lib/backup/manager.rb  143
-rw-r--r--  lib/backup/options.rb  4
-rw-r--r--  lib/backup/repositories.rb  138
-rw-r--r--  lib/backup/targets/database.rb  283
-rw-r--r--  lib/backup/targets/files.rb  174
-rw-r--r--  lib/backup/targets/repositories.rb  145
-rw-r--r--  lib/backup/targets/target.rb  45
-rw-r--r--  lib/backup/task.rb  45
-rw-r--r--  lib/backup/tasks/artifacts.rb  25
-rw-r--r--  lib/backup/tasks/builds.rb  23
-rw-r--r--  lib/backup/tasks/ci_secure_files.rb  25
-rw-r--r--  lib/backup/tasks/database.rb  19
-rw-r--r--  lib/backup/tasks/lfs.rb  23
-rw-r--r--  lib/backup/tasks/packages.rb  25
-rw-r--r--  lib/backup/tasks/pages.rb  29
-rw-r--r--  lib/backup/tasks/registry.rb  25
-rw-r--r--  lib/backup/tasks/repositories.rb  40
-rw-r--r--  lib/backup/tasks/task.rb  42
-rw-r--r--  lib/backup/tasks/terraform_state.rb  25
-rw-r--r--  lib/backup/tasks/uploads.rb  25
-rw-r--r--  locale/gitlab.pot  12
-rw-r--r--  scripts/review_apps/base-config.yaml  2
-rw-r--r--  spec/factories/gitlab/backup/options.rb  11
-rw-r--r--  spec/features/merge_request/user_reverts_merge_request_spec.rb  18
-rw-r--r--  spec/frontend/members/components/avatars/user_avatar_spec.js  1
-rw-r--r--  spec/frontend/members/mock_data.js  1
-rw-r--r--  spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js  28
-rw-r--r--  spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js  2
-rw-r--r--  spec/frontend/user_popovers_spec.js  3
-rw-r--r--  spec/frontend/vue_shared/components/user_popover/user_popover_spec.js  11
-rw-r--r--  spec/frontend/work_items/components/work_item_assignees_with_edit_spec.js  2
-rw-r--r--  spec/lib/backup/files_spec.rb  390
-rw-r--r--  spec/lib/backup/gitaly_backup_spec.rb  12
-rw-r--r--  spec/lib/backup/manager_spec.rb  188
-rw-r--r--  spec/lib/backup/options_spec.rb  16
-rw-r--r--  spec/lib/backup/targets/database_spec.rb (renamed from spec/lib/backup/database_spec.rb)  46
-rw-r--r--  spec/lib/backup/targets/files_spec.rb  403
-rw-r--r--  spec/lib/backup/targets/repositories_spec.rb (renamed from spec/lib/backup/repositories_spec.rb)  126
-rw-r--r--  spec/lib/backup/targets/target_spec.rb  36
-rw-r--r--  spec/lib/backup/task_spec.rb  22
-rw-r--r--  spec/models/commit_spec.rb  31
-rw-r--r--  spec/models/merge_request_spec.rb  95
-rw-r--r--  spec/requests/api/import_github_spec.rb  7
-rw-r--r--  spec/services/import/github_service_spec.rb  59
-rw-r--r--  spec/support/helpers/database/duplicate_indexes.yml  3
-rw-r--r--  spec/support/rspec_order_todo.yml  3
-rw-r--r--  spec/tasks/gitlab/backup_rake_spec.rb  20
111 files changed, 2104 insertions(+), 1548 deletions(-)
diff --git a/.rubocop_todo/gitlab/doc_url.yml b/.rubocop_todo/gitlab/doc_url.yml
index fbc58c436e8..05b5d155cbf 100644
--- a/.rubocop_todo/gitlab/doc_url.yml
+++ b/.rubocop_todo/gitlab/doc_url.yml
@@ -22,7 +22,6 @@ Gitlab/DocUrl:
- 'ee/lib/ee/gitlab/ci/pipeline/quota/size.rb'
- 'ee/lib/system_check/app/advanced_search_migrations_check.rb'
- 'ee/lib/tasks/gitlab/geo.rake'
- - 'lib/backup/database.rb'
- 'lib/feature.rb'
- 'lib/gitlab/audit/auditor.rb'
- 'lib/gitlab/ci/config/entry/processable.rb'
diff --git a/.rubocop_todo/layout/line_length.yml b/.rubocop_todo/layout/line_length.yml
index 17612d463e8..59f19430b57 100644
--- a/.rubocop_todo/layout/line_length.yml
+++ b/.rubocop_todo/layout/line_length.yml
@@ -2381,7 +2381,6 @@ Layout/LineLength:
- 'lib/atlassian/jira_connect/client.rb'
- 'lib/atlassian/jira_connect/serializers/repository_entity.rb'
- 'lib/backup.rb'
- - 'lib/backup/files.rb'
- 'lib/backup/gitaly_backup.rb'
- 'lib/backup/manager.rb'
- 'lib/banzai/filter/autolink_filter.rb'
@@ -3405,10 +3404,8 @@ Layout/LineLength:
- 'spec/lib/api/helpers_spec.rb'
- 'spec/lib/atlassian/jira_connect/client_spec.rb'
- 'spec/lib/atlassian/jira_issue_key_extractor_spec.rb'
- - 'spec/lib/backup/files_spec.rb'
- 'spec/lib/backup/gitaly_backup_spec.rb'
- 'spec/lib/backup/manager_spec.rb'
- - 'spec/lib/backup/repositories_spec.rb'
- 'spec/lib/banzai/commit_renderer_spec.rb'
- 'spec/lib/banzai/filter/ascii_doc_post_processing_filter_spec.rb'
- 'spec/lib/banzai/filter/ascii_doc_sanitization_filter_spec.rb'
diff --git a/.rubocop_todo/layout/space_in_lambda_literal.yml b/.rubocop_todo/layout/space_in_lambda_literal.yml
index bd901e40016..708944acbc7 100644
--- a/.rubocop_todo/layout/space_in_lambda_literal.yml
+++ b/.rubocop_todo/layout/space_in_lambda_literal.yml
@@ -360,7 +360,6 @@ Layout/SpaceInLambdaLiteral:
- 'spec/deprecation_toolkit_env.rb'
- 'spec/features/admin/users/admin_sees_unconfirmed_user_spec.rb'
- 'spec/helpers/namespaces_helper_spec.rb'
- - 'spec/lib/backup/gitaly_backup_spec.rb'
- 'spec/lib/container_registry/client_spec.rb'
- 'spec/lib/gitlab/analytics/date_filler_spec.rb'
- 'spec/lib/gitlab/background_migration/batched_migration_job_spec.rb'
diff --git a/.rubocop_todo/lint/unused_method_argument.yml b/.rubocop_todo/lint/unused_method_argument.yml
index 2c9d5c3da14..47448320010 100644
--- a/.rubocop_todo/lint/unused_method_argument.yml
+++ b/.rubocop_todo/lint/unused_method_argument.yml
@@ -280,7 +280,6 @@ Lint/UnusedMethodArgument:
- 'ee/lib/compliance_management/compliance_report/commit_loader.rb'
- 'ee/lib/ee/api/ci/helpers/runner.rb'
- 'ee/lib/ee/api/entities/project.rb'
- - 'ee/lib/ee/backup/repositories.rb'
- 'ee/lib/ee/gitlab/auth/ldap/sync/proxy.rb'
- 'ee/lib/ee/gitlab/geo_git_access.rb'
- 'ee/lib/ee/gitlab/tracking.rb'
@@ -328,9 +327,6 @@ Lint/UnusedMethodArgument:
- 'lib/api/projects_relation_builder.rb'
- 'lib/api/search.rb'
- 'lib/atlassian/jira_connect/client.rb'
- - 'lib/backup/database.rb'
- - 'lib/backup/files.rb'
- - 'lib/backup/gitaly_backup.rb'
- 'lib/banzai/filter/playable_link_filter.rb'
- 'lib/banzai/filter/references/abstract_reference_filter.rb'
- 'lib/banzai/filter/references/commit_range_reference_filter.rb'
diff --git a/.rubocop_todo/rails/file_path.yml b/.rubocop_todo/rails/file_path.yml
index 49d95cfeac3..0b96f15e24a 100644
--- a/.rubocop_todo/rails/file_path.yml
+++ b/.rubocop_todo/rails/file_path.yml
@@ -93,7 +93,6 @@ Rails/FilePath:
- 'spec/features/uploads/user_uploads_file_to_note_spec.rb'
- 'spec/helpers/blob_helper_spec.rb'
- 'spec/helpers/startupjs_helper_spec.rb'
- - 'spec/lib/backup/database_spec.rb'
- 'spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb'
- 'spec/lib/gitlab/database/schema_migrations/context_spec.rb'
- 'spec/lib/gitlab/feature_categories_spec.rb'
diff --git a/.rubocop_todo/rspec/any_instance_of.yml b/.rubocop_todo/rspec/any_instance_of.yml
index bac4094b61b..947b87e30a3 100644
--- a/.rubocop_todo/rspec/any_instance_of.yml
+++ b/.rubocop_todo/rspec/any_instance_of.yml
@@ -118,7 +118,6 @@ RSpec/AnyInstanceOf:
- 'spec/lib/api/entities/merge_request_basic_spec.rb'
- 'spec/lib/api/entities/merge_request_changes_spec.rb'
- 'spec/lib/api/helpers_spec.rb'
- - 'spec/lib/backup/files_spec.rb'
- 'spec/lib/backup/manager_spec.rb'
- 'spec/lib/banzai/commit_renderer_spec.rb'
- 'spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb'
diff --git a/.rubocop_todo/rspec/context_wording.yml b/.rubocop_todo/rspec/context_wording.yml
index 0fd9bdf250c..d5b2ba83599 100644
--- a/.rubocop_todo/rspec/context_wording.yml
+++ b/.rubocop_todo/rspec/context_wording.yml
@@ -284,7 +284,6 @@ RSpec/ContextWording:
- 'ee/spec/lib/ee/api/entities/user_with_admin_spec.rb'
- 'ee/spec/lib/ee/api/helpers/variables_helpers_spec.rb'
- 'ee/spec/lib/ee/api/helpers_spec.rb'
- - 'ee/spec/lib/ee/backup/repositories_spec.rb'
- 'ee/spec/lib/ee/gitlab/alert_management/payload/generic_spec.rb'
- 'ee/spec/lib/ee/gitlab/application_context_spec.rb'
- 'ee/spec/lib/ee/gitlab/auth/ldap/sync/group_spec.rb'
@@ -1466,7 +1465,6 @@ RSpec/ContextWording:
- 'spec/lib/atlassian/jira_connect_spec.rb'
- 'spec/lib/backup/gitaly_backup_spec.rb'
- 'spec/lib/backup/manager_spec.rb'
- - 'spec/lib/backup/repositories_spec.rb'
- 'spec/lib/banzai/color_parser_spec.rb'
- 'spec/lib/banzai/filter/absolute_link_filter_spec.rb'
- 'spec/lib/banzai/filter/asset_proxy_filter_spec.rb'
diff --git a/.rubocop_todo/rspec/feature_category.yml b/.rubocop_todo/rspec/feature_category.yml
index b4fa6a50025..a9cbafd04a7 100644
--- a/.rubocop_todo/rspec/feature_category.yml
+++ b/.rubocop_todo/rspec/feature_category.yml
@@ -2580,8 +2580,6 @@ RSpec/FeatureCategory:
- 'spec/lib/api/validations/validators/untrusted_regexp_spec.rb'
- 'spec/lib/backup/database_backup_error_spec.rb'
- 'spec/lib/backup/file_backup_error_spec.rb'
- - 'spec/lib/backup/files_spec.rb'
- - 'spec/lib/backup/task_spec.rb'
- 'spec/lib/banzai/filter/inline_diff_filter_spec.rb'
- 'spec/lib/banzai/pipeline/incident_management/timeline_event_pipeline_spec.rb'
- 'spec/lib/bitbucket/collection_spec.rb'
diff --git a/.rubocop_todo/rspec/named_subject.yml b/.rubocop_todo/rspec/named_subject.yml
index a14d560a3eb..13ea468885a 100644
--- a/.rubocop_todo/rspec/named_subject.yml
+++ b/.rubocop_todo/rspec/named_subject.yml
@@ -289,7 +289,6 @@ RSpec/NamedSubject:
- 'ee/spec/lib/ee/api/helpers/notes_helpers_spec.rb'
- 'ee/spec/lib/ee/api/helpers/scim_pagination_spec.rb'
- 'ee/spec/lib/ee/api/helpers_spec.rb'
- - 'ee/spec/lib/ee/backup/repositories_spec.rb'
- 'ee/spec/lib/ee/bulk_imports/groups/stage_spec.rb'
- 'ee/spec/lib/ee/bulk_imports/projects/stage_spec.rb'
- 'ee/spec/lib/ee/feature_spec.rb'
@@ -1686,13 +1685,9 @@ RSpec/NamedSubject:
- 'spec/lib/atlassian/jira_connect/serializers/repository_entity_spec.rb'
- 'spec/lib/atlassian/jira_connect/serializers/reviewer_entity_spec.rb'
- 'spec/lib/backup/database_backup_error_spec.rb'
- - 'spec/lib/backup/database_spec.rb'
- 'spec/lib/backup/dump/postgres_spec.rb'
- - 'spec/lib/backup/files_spec.rb'
- - 'spec/lib/backup/gitaly_backup_spec.rb'
- 'spec/lib/backup/manager_spec.rb'
- - 'spec/lib/backup/repositories_spec.rb'
- - 'spec/lib/backup/task_spec.rb'
+ - 'spec/lib/backup/gitaly_backup_spec.rb'
- 'spec/lib/banzai/color_parser_spec.rb'
- 'spec/lib/banzai/filter/broadcast_message_placeholders_filter_spec.rb'
- 'spec/lib/banzai/filter/math_filter_spec.rb'
diff --git a/.rubocop_todo/rspec/return_from_stub.yml b/.rubocop_todo/rspec/return_from_stub.yml
index 5aaa1847409..3cc9d67a7b3 100644
--- a/.rubocop_todo/rspec/return_from_stub.yml
+++ b/.rubocop_todo/rspec/return_from_stub.yml
@@ -105,7 +105,6 @@ RSpec/ReturnFromStub:
- 'spec/helpers/users/callouts_helper_spec.rb'
- 'spec/helpers/users_helper_spec.rb'
- 'spec/helpers/visibility_level_helper_spec.rb'
- - 'spec/lib/backup/files_spec.rb'
- 'spec/lib/banzai/filter/issuable_reference_expansion_filter_spec.rb'
- 'spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb'
- 'spec/lib/banzai/reference_parser/issue_parser_spec.rb'
diff --git a/.rubocop_todo/rspec/verified_doubles.yml b/.rubocop_todo/rspec/verified_doubles.yml
index 63bebcbb2df..0c52ae19d53 100644
--- a/.rubocop_todo/rspec/verified_doubles.yml
+++ b/.rubocop_todo/rspec/verified_doubles.yml
@@ -309,8 +309,6 @@ RSpec/VerifiedDoubles:
- 'spec/lib/api/helpers/variables_helpers_spec.rb'
- 'spec/lib/api/helpers_spec.rb'
- 'spec/lib/atlassian/jira_connect/client_spec.rb'
- - 'spec/lib/backup/files_spec.rb'
- - 'spec/lib/backup/repositories_spec.rb'
- 'spec/lib/banzai/cross_project_reference_spec.rb'
- 'spec/lib/banzai/filter/gollum_tags_filter_spec.rb'
- 'spec/lib/banzai/filter/repository_link_filter_spec.rb'
diff --git a/.rubocop_todo/style/empty_method.yml b/.rubocop_todo/style/empty_method.yml
index f8a054f1c86..f7120a87472 100644
--- a/.rubocop_todo/style/empty_method.yml
+++ b/.rubocop_todo/style/empty_method.yml
@@ -91,7 +91,6 @@ Style/EmptyMethod:
- 'lib/api/helpers/packages/conan/api_helpers.rb'
- 'lib/api/helpers/projects_helpers.rb'
- 'lib/api/projects_relation_builder.rb'
- - 'lib/backup/task.rb'
- 'lib/gitlab/alert_management/payload/base.rb'
- 'lib/gitlab/background_migration/backfill_iteration_cadence_id_for_boards.rb'
- 'lib/gitlab/background_migration/create_security_setting.rb'
diff --git a/.rubocop_todo/style/explicit_block_argument.yml b/.rubocop_todo/style/explicit_block_argument.yml
index 1e5725a2ef0..1eaaaf8af6a 100644
--- a/.rubocop_todo/style/explicit_block_argument.yml
+++ b/.rubocop_todo/style/explicit_block_argument.yml
@@ -19,7 +19,6 @@ Style/ExplicitBlockArgument:
- 'ee/app/services/gitlab_subscriptions/fetch_subscription_plans_service.rb'
- 'ee/app/services/group_saml/identity/destroy_service.rb'
- 'ee/app/services/security/security_orchestration_policies/base_merge_requests_service.rb'
- - 'ee/lib/ee/backup/repositories.rb'
- 'ee/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules.rb'
- 'ee/lib/gitlab/audit/events/preloader.rb'
- 'ee/lib/gitlab/ci/config/security_orchestration_policies/processor.rb'
diff --git a/.rubocop_todo/style/guard_clause.yml b/.rubocop_todo/style/guard_clause.yml
index 65847c093d5..933b33999e3 100644
--- a/.rubocop_todo/style/guard_clause.yml
+++ b/.rubocop_todo/style/guard_clause.yml
@@ -411,7 +411,6 @@ Style/GuardClause:
- 'lib/api/helpers/snippets_helpers.rb'
- 'lib/api/helpers/version.rb'
- 'lib/api/projects.rb'
- - 'lib/backup/files.rb'
- 'lib/banzai/filter/external_link_filter.rb'
- 'lib/banzai/filter/gollum_tags_filter.rb'
- 'lib/banzai/filter/references/merge_request_reference_filter.rb'
diff --git a/.rubocop_todo/style/inline_disable_annotation.yml b/.rubocop_todo/style/inline_disable_annotation.yml
index f6d56b477b7..b8132af6b62 100644
--- a/.rubocop_todo/style/inline_disable_annotation.yml
+++ b/.rubocop_todo/style/inline_disable_annotation.yml
@@ -1794,7 +1794,6 @@ Style/InlineDisableAnnotation:
- 'ee/lib/ee/api/project_milestones.rb'
- 'ee/lib/ee/api/projects.rb'
- 'ee/lib/ee/api/settings.rb'
- - 'ee/lib/ee/backup/repositories.rb'
- 'ee/lib/ee/banzai/filter/references/epic_reference_filter.rb'
- 'ee/lib/ee/banzai/reference_parser/epic_parser.rb'
- 'ee/lib/ee/gitlab/analytics/cycle_analytics/aggregated/base_query_builder.rb'
@@ -2833,7 +2832,6 @@ Style/InlineDisableAnnotation:
- 'spec/lib/api/base_spec.rb'
- 'spec/lib/api/entities/wiki_page_spec.rb'
- 'spec/lib/api/helpers/packages/npm_spec.rb'
- - 'spec/lib/backup/database_spec.rb'
- 'spec/lib/backup/manager_spec.rb'
- 'spec/lib/banzai/filter/footnote_filter_spec.rb'
- 'spec/lib/banzai/filter/image_link_filter_spec.rb'
diff --git a/.rubocop_todo/style/numeric_literal_prefix.yml b/.rubocop_todo/style/numeric_literal_prefix.yml
index 0004b0da84b..54dfdd203cd 100644
--- a/.rubocop_todo/style/numeric_literal_prefix.yml
+++ b/.rubocop_todo/style/numeric_literal_prefix.yml
@@ -15,7 +15,6 @@ Style/NumericLiteralPrefix:
- 'ee/spec/models/gitlab/seat_link_data_spec.rb'
- 'ee/spec/services/incident_management/oncall_schedules/update_service_spec.rb'
- 'ee/spec/workers/sync_seat_link_worker_spec.rb'
- - 'lib/backup/files.rb'
- 'lib/gitlab/import_export/command_line_util.rb'
- 'lib/gitlab/jwt_authenticatable.rb'
- 'lib/system_check/app/uploads_path_permission_check.rb'
diff --git a/.rubocop_todo/style/redundant_interpolation.yml b/.rubocop_todo/style/redundant_interpolation.yml
index d50db1cbbbd..c934d8660d1 100644
--- a/.rubocop_todo/style/redundant_interpolation.yml
+++ b/.rubocop_todo/style/redundant_interpolation.yml
@@ -3,7 +3,6 @@
Style/RedundantInterpolation:
Exclude:
- 'lib/backup/manager.rb'
- - 'lib/backup/task.rb'
- 'lib/gitlab/application_rate_limiter.rb'
- 'lib/gitlab/chat/responder/mattermost.rb'
- 'lib/gitlab/ci/build/releaser.rb'
diff --git a/.rubocop_todo/style/string_concatenation.yml b/.rubocop_todo/style/string_concatenation.yml
index b14f8236aac..b59c495ceb5 100644
--- a/.rubocop_todo/style/string_concatenation.yml
+++ b/.rubocop_todo/style/string_concatenation.yml
@@ -75,7 +75,6 @@ Style/StringConcatenation:
- 'ee/spec/tasks/gitlab/license_rake_spec.rb'
- 'lib/api/entities/tree_object.rb'
- 'lib/api/internal/kubernetes.rb'
- - 'lib/backup/files.rb'
- 'lib/backup/manager.rb'
- 'lib/banzai/filter/blockquote_fence_filter.rb'
- 'lib/banzai/filter/references/label_reference_filter.rb'
@@ -157,8 +156,6 @@ Style/StringConcatenation:
- 'spec/helpers/diff_helper_spec.rb'
- 'spec/helpers/search_helper_spec.rb'
- 'spec/lib/api/helpers/related_resources_helpers_spec.rb'
- - 'spec/lib/backup/gitaly_backup_spec.rb'
- - 'spec/lib/backup/repositories_spec.rb'
- 'spec/lib/banzai/filter/color_filter_spec.rb'
- 'spec/lib/banzai/filter/external_link_filter_spec.rb'
- 'spec/lib/banzai/filter/front_matter_filter_spec.rb'
diff --git a/.rubocop_todo/style/symbol_proc.yml b/.rubocop_todo/style/symbol_proc.yml
index 52020404660..e90a5b78f35 100644
--- a/.rubocop_todo/style/symbol_proc.yml
+++ b/.rubocop_todo/style/symbol_proc.yml
@@ -167,7 +167,6 @@ Style/SymbolProc:
- 'spec/graphql/types/work_items/widget_type_enum_spec.rb'
- 'spec/helpers/instance_configuration_helper_spec.rb'
- 'spec/helpers/members_helper_spec.rb'
- - 'spec/lib/backup/gitaly_backup_spec.rb'
- 'spec/lib/gitlab/database/migration_helpers/loose_foreign_key_helpers_spec.rb'
- 'spec/lib/gitlab/git/commit_spec.rb'
- 'spec/lib/gitlab/gpg/invalid_gpg_signature_updater_spec.rb'
diff --git a/app/assets/javascripts/members/components/avatars/user_avatar.vue b/app/assets/javascripts/members/components/avatars/user_avatar.vue
index 4260ee14a14..656d9b61cb6 100644
--- a/app/assets/javascripts/members/components/avatars/user_avatar.vue
+++ b/app/assets/javascripts/members/components/avatars/user_avatar.vue
@@ -65,6 +65,7 @@ export default {
:href="user.webUrl"
:data-user-id="user.id"
:data-username="user.username"
+ :data-email="user.email"
>
<gl-avatar-labeled
:label="user.name"
diff --git a/app/assets/javascripts/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row.vue b/app/assets/javascripts/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row.vue
index 3796c5440f7..9fbc22f2312 100644
--- a/app/assets/javascripts/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row.vue
+++ b/app/assets/javascripts/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row.vue
@@ -104,11 +104,14 @@ export default {
// remove sha256: from the string, and show only the first 7 char
return this.tag.digest?.substring(7, 14) ?? NOT_AVAILABLE_TEXT;
},
+ publishDate() {
+ return this.tag.publishedAt || this.tag.createdAt;
+ },
publishedDate() {
- return formatDate(this.tag.createdAt, 'isoDate');
+ return formatDate(this.publishDate, 'isoDate');
},
publishedTime() {
- return formatDate(this.tag.createdAt, 'HH:MM:ss Z');
+ return formatDate(this.publishDate, 'HH:MM:ss Z');
},
formattedRevision() {
// to be removed when API response is adjusted
@@ -182,7 +185,7 @@ export default {
<span data-testid="time">
<gl-sprintf :message="$options.i18n.CREATED_AT_LABEL">
<template #timeInfo>
- <time-ago-tooltip :time="tag.createdAt" />
+ <time-ago-tooltip :time="publishDate" />
</template>
</gl-sprintf>
</span>
diff --git a/app/assets/javascripts/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql b/app/assets/javascripts/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql
index a0a80600603..0c70a443fdc 100644
--- a/app/assets/javascripts/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql
+++ b/app/assets/javascripts/packages_and_registries/container_registry/explorer/graphql/queries/get_container_repository_tags.query.graphql
@@ -22,6 +22,7 @@ query getContainerRepositoryTags(
revision
shortRevision
createdAt
+ publishedAt
totalSize
canDelete
}
diff --git a/app/assets/javascripts/user_popovers.js b/app/assets/javascripts/user_popovers.js
index ee5d6a22fc3..90c5e89a0b5 100644
--- a/app/assets/javascripts/user_popovers.js
+++ b/app/assets/javascripts/user_popovers.js
@@ -13,13 +13,14 @@ const removeTitle = (el) => {
const getPreloadedUserInfo = (dataset) => {
const userId = dataset.user || dataset.userId;
- const { username, name, avatarUrl } = dataset;
+ const { username, name, avatarUrl, email } = dataset;
return {
userId,
username,
name,
avatarUrl,
+ email,
};
};
diff --git a/app/assets/javascripts/vue_shared/components/user_popover/user_popover.vue b/app/assets/javascripts/vue_shared/components/user_popover/user_popover.vue
index 30f616dd8e1..e53e81ec98e 100644
--- a/app/assets/javascripts/vue_shared/components/user_popover/user_popover.vue
+++ b/app/assets/javascripts/vue_shared/components/user_popover/user_popover.vue
@@ -267,6 +267,10 @@ export default {
<template v-else>
<template v-if="!isBlocked">
<div class="gl-text-gray-500">
+ <div v-if="user.email" class="gl-display-flex gl-mb-2">
+ <gl-icon name="mail" class="gl-flex-shrink-0" />
+ <span ref="email" class="gl-ml-2">{{ user.email }}</span>
+ </div>
<div v-if="user.bio" class="gl-display-flex gl-mb-2">
<gl-icon name="profile" class="gl-flex-shrink-0" />
<span ref="bio" class="gl-ml-2">{{ user.bio }}</span>
diff --git a/app/assets/javascripts/work_items/components/work_item_assignees_with_edit.vue b/app/assets/javascripts/work_items/components/work_item_assignees_with_edit.vue
index bb7baed29f6..29c7e0032ef 100644
--- a/app/assets/javascripts/work_items/components/work_item_assignees_with_edit.vue
+++ b/app/assets/javascripts/work_items/components/work_item_assignees_with_edit.vue
@@ -1,6 +1,6 @@
<script>
import { GlButton } from '@gitlab/ui';
-import { isEmpty } from 'lodash';
+import { unionBy } from 'lodash';
import currentUserQuery from '~/graphql_shared/queries/current_user.query.graphql';
import groupUsersSearchQuery from '~/graphql_shared/queries/group_users_search.query.graphql';
import usersSearchQuery from '~/graphql_shared/queries/users_search.query.graphql';
@@ -139,12 +139,10 @@ export default {
return this.allowsMultipleAssignees ? __('Select assignees') : __('Select assignee');
},
filteredAssignees() {
- return isEmpty(this.searchUsers)
- ? this.assignees
- : this.searchUsers.filter(({ id }) => this.localAssigneeIds.includes(id));
+ return unionBy(this.assignees, this.searchUsers, 'id');
},
localAssignees() {
- return this.filteredAssignees || [];
+ return this.filteredAssignees.filter(({ id }) => this.localAssigneeIds.includes(id)) || [];
},
},
watch: {
@@ -238,7 +236,7 @@ export default {
:dropdown-label="dropdownLabel"
:can-update="canUpdate"
dropdown-name="assignees"
- show-footer
+ :show-footer="canInviteMembers"
:infinite-scroll="hasNextPage"
:infinite-scroll-loading="isLoadingMore"
:loading="isLoadingUsers"
diff --git a/app/graphql/types/projects/branch_rule_type.rb b/app/graphql/types/projects/branch_rule_type.rb
index 08b1203d4a3..f8ea3644945 100644
--- a/app/graphql/types/projects/branch_rule_type.rb
+++ b/app/graphql/types/projects/branch_rule_type.rb
@@ -4,7 +4,7 @@ module Types
module Projects
class BranchRuleType < BaseObject
graphql_name 'BranchRule'
- description 'List of branch rules for a project, grouped by branch name.'
+ description 'Branch rules configured for a rule target.'
authorize :read_protected_branch
alias_method :branch_rule, :object
@@ -12,7 +12,7 @@ module Types
field :name,
type: GraphQL::Types::String,
null: false,
- description: 'Branch name, with wildcards, for the branch rules.'
+ description: 'Name of the branch rule target. Includes wildcards.'
field :is_default,
type: GraphQL::Types::Boolean,
diff --git a/app/models/commit.rb b/app/models/commit.rb
index 312b1b3f70f..eca984f48fa 100644
--- a/app/models/commit.rb
+++ b/app/models/commit.rb
@@ -639,6 +639,8 @@ class Commit
end
def merged_merge_request_no_cache(user)
- MergeRequestsFinder.new(user, project_id: project_id).find_by(merge_commit_sha: id) if merge_commit?
+ return MergeRequestsFinder.new(user, project_id: project_id).find_by(merge_commit_sha: id) if merge_commit?
+
+ MergeRequestsFinder.new(user, project_id: project_id).find_by(squash_commit_sha: id)
end
end
diff --git a/app/models/container_registry/protection/rule.rb b/app/models/container_registry/protection/rule.rb
index 34d00bdef2f..6a3c79ee046 100644
--- a/app/models/container_registry/protection/rule.rb
+++ b/app/models/container_registry/protection/rule.rb
@@ -3,10 +3,6 @@
module ContainerRegistry
module Protection
class Rule < ApplicationRecord
- include IgnorableColumns
-
- ignore_column :container_path_pattern, remove_with: '16.8', remove_after: '2023-12-22'
-
enum delete_protected_up_to_access_level:
Gitlab::Access.sym_options_with_owner.slice(:maintainer, :owner, :developer),
_prefix: :delete_protected_up_to
diff --git a/app/models/merge_request.rb b/app/models/merge_request.rb
index ae68a36c8d2..39b147cfd3f 100644
--- a/app/models/merge_request.rb
+++ b/app/models/merge_request.rb
@@ -1905,6 +1905,10 @@ class MergeRequest < ApplicationRecord
@merge_commit ||= project.commit(merge_commit_sha) if merge_commit_sha
end
+ def squash_commit
+ @squash_commit ||= project.commit(squash_commit_sha) if squash_commit_sha
+ end
+
def short_merge_commit_sha
Commit.truncate_sha(merge_commit_sha) if merge_commit_sha
end
@@ -1922,10 +1926,33 @@ class MergeRequest < ApplicationRecord
end
end
+ # Exists only for merged merge requests
+ def commit_to_revert
+ return unless merged?
+
+ # By default, it's equal to a merge commit
+ return merge_commit if merge_commit
+
+ # But in case of fast-forward merge merge commits are not created
+ # To solve that we can use `squash_commit` if the merge request was squashed
+ return squash_commit if squash_commit
+
+ # Edge case: one commit in the merge request without merge or squash commit
+ return project.commit(diff_head_sha) if commits_count == 1
+
+ nil
+ end
+
+ def commit_to_cherry_pick
+ commit_to_revert
+ end
+
def can_be_reverted?(current_user)
- return false unless merge_commit
return false unless merged_at
+ commit = commit_to_revert
+ return false unless commit
+
# It is not guaranteed that Note#created_at will be strictly later than
# MergeRequestMetric#merged_at. Nanoseconds on MySQL may break this
# comparison, as will a HA environment if clocks are not *precisely*
@@ -1934,7 +1961,7 @@ class MergeRequest < ApplicationRecord
notes_association = notes_with_associations.where('created_at >= ?', cutoff)
- !merge_commit.has_been_reverted?(current_user, notes_association)
+ !commit.has_been_reverted?(current_user, notes_association)
end
def merged_at
@@ -1949,7 +1976,7 @@ class MergeRequest < ApplicationRecord
end
def can_be_cherry_picked?
- merge_commit.present?
+ commit_to_cherry_pick.present?
end
def has_complete_diff_refs?
diff --git a/app/services/import/github_service.rb b/app/services/import/github_service.rb
index 92a91740304..b8389192b18 100644
--- a/app/services/import/github_service.rb
+++ b/app/services/import/github_service.rb
@@ -5,9 +5,6 @@ module Import
include ActiveSupport::NumberHelper
include Gitlab::Utils::StrongMemoize
- MINIMUM_IMPORT_SCOPE = %w[repo].freeze
- COLLAB_IMPORT_SCOPES = %w[admin:org read:org].freeze
-
attr_accessor :client
attr_reader :params, :current_user
@@ -15,9 +12,6 @@ module Import
context_error = validate_context
return context_error if context_error
- scope_error = validate_scopes
- return scope_error if scope_error
-
project = create_project(access_params, provider)
track_access_level('github')
@@ -104,26 +98,6 @@ module Import
private
- def validate_scopes
- # We need to call `#repo` to ensure the `#last_response` from the client has the headers we need.
- repo
- scopes = client.octokit.last_response.headers["x-oauth-scopes"]
- scopes = scopes.split(',').map(&:strip)
-
- unless scopes.intersect?(MINIMUM_IMPORT_SCOPE + COLLAB_IMPORT_SCOPES)
- return log_and_return_error('Invalid Scope', _('Your GitHub access token does not have the correct scope to import.'), :unprocessable_entity)
- end
-
- collaborators_import = params.dig(:optional_stages, :collaborators_import)
- # A value for `collaborators_import` may not be included in POST params
- # and the default value is `true`
- return unless collaborators_import == true || collaborators_import.nil?
-
- return if scopes.intersect?(COLLAB_IMPORT_SCOPES)
-
- log_and_return_error('Invalid scope', _('Your GitHub access token does not have the correct scope to import collaborators.'), :unprocessable_entity)
- end
-
def validate_context
if blocked_url?
log_and_return_error("Invalid URL: #{url}", _("Invalid URL: %{url}") % { url: url }, :bad_request)
diff --git a/app/views/devise/shared/_footer.html.haml b/app/views/devise/shared/_footer.html.haml
index a8c45566d63..44c99a828d7 100644
--- a/app/views/devise/shared/_footer.html.haml
+++ b/app/views/devise/shared/_footer.html.haml
@@ -1,7 +1,7 @@
.footer-container.gl-w-full.gl-align-self-end
%hr.gl-m-0
.container.gl-py-5.gl-display-flex.gl-justify-content-space-between.gl-align-items-flex-start
- .gl-display-flex.gl-gap-5.gl-flex-wrap
+ .gl-display-none.gl-md-display-flex.gl-gap-5.gl-flex-wrap
- unless public_visibility_restricted?
= link_to _("Explore"), explore_root_path
= link_to _("Help"), help_path
diff --git a/app/views/projects/merge_requests/_page.html.haml b/app/views/projects/merge_requests/_page.html.haml
index 35eb9d2850d..7cfd102799c 100644
--- a/app/views/projects/merge_requests/_page.html.haml
+++ b/app/views/projects/merge_requests/_page.html.haml
@@ -109,9 +109,9 @@
= render 'shared/issuable/sidebar', issuable_sidebar: @issuable_sidebar, assignees: @merge_request.assignees, reviewers: @merge_request.reviewers, source_branch: @merge_request.source_branch
- if @merge_request.can_be_reverted?(current_user)
- = render "projects/commit/change", type: 'revert', commit: @merge_request.merge_commit
+ = render "projects/commit/change", type: 'revert', commit: @merge_request.commit_to_revert
- if @merge_request.can_be_cherry_picked?
- = render "projects/commit/change", type: 'cherry-pick', commit: @merge_request.merge_commit
+ = render "projects/commit/change", type: 'cherry-pick', commit: @merge_request.commit_to_cherry_pick
#js-review-bar{ data: review_bar_data(@merge_request, current_user) }
diff --git a/app/views/projects/merge_requests/widget/_commit_change_content.html.haml b/app/views/projects/merge_requests/widget/_commit_change_content.html.haml
index 572d9e7578c..58357d6147c 100644
--- a/app/views/projects/merge_requests/widget/_commit_change_content.html.haml
+++ b/app/views/projects/merge_requests/widget/_commit_change_content.html.haml
@@ -1,4 +1,4 @@
- if @merge_request.can_be_reverted?(current_user)
- = render "projects/commit/change", type: 'revert', commit: @merge_request.merge_commit
+ = render "projects/commit/change", type: 'revert', commit: @merge_request.commit_to_revert
- if @merge_request.can_be_cherry_picked?
- = render "projects/commit/change", type: 'cherry-pick', commit: @merge_request.merge_commit
+ = render "projects/commit/change", type: 'cherry-pick', commit: @merge_request.commit_to_cherry_pick
diff --git a/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_artifact.yml b/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_artifact.yml
index 18a4ac13db1..5d2c646af4c 100644
--- a/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_artifact.yml
+++ b/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_artifact.yml
@@ -6,4 +6,4 @@ introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141345
milestone: '16.8'
queued_migration_version: 20240109090354
finalize_after: '2024-01-22'
-finalized_by: # version of the migration that finalized this BBM
+finalized_by: 20240122133127
diff --git a/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_config.yml b/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_config.yml
index e51bef28419..1adceb3d3f3 100644
--- a/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_config.yml
+++ b/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_config.yml
@@ -6,4 +6,4 @@ introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141461
milestone: '16.8'
queued_migration_version: 20240110090352
finalize_after: '2024-01-22'
-finalized_by: # version of the migration that finalized this BBM
+finalized_by: 20240122133457
diff --git a/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_metadata.yml b/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_metadata.yml
index a56fcfd4cca..68d788926c9 100644
--- a/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_metadata.yml
+++ b/db/docs/batched_background_migrations/backfill_partition_id_ci_pipeline_metadata.yml
@@ -6,4 +6,4 @@ introduced_by_url: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/141078
milestone: '16.8'
queued_migration_version: 20240108082419
finalize_after: '2024-01-15'
-finalized_by: # version of the migration that finalized this BBM
+finalized_by: 20240122132856
diff --git a/db/post_migrate/20240122092018_drop_index_from_ci_build_trace_metadata.rb b/db/post_migrate/20240122092018_drop_index_from_ci_build_trace_metadata.rb
new file mode 100644
index 00000000000..32f23b2e7de
--- /dev/null
+++ b/db/post_migrate/20240122092018_drop_index_from_ci_build_trace_metadata.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class DropIndexFromCiBuildTraceMetadata < Gitlab::Database::Migration[2.2]
+ milestone '16.9'
+ disable_ddl_transaction!
+
+ INDEX_NAME = :index_ci_build_trace_metadata_on_trace_artifact_id
+ TABLE_NAME = :ci_build_trace_metadata
+
+ def up
+ remove_concurrent_index_by_name(TABLE_NAME, INDEX_NAME)
+ end
+
+ def down
+ add_concurrent_index(TABLE_NAME, :trace_artifact_id, name: INDEX_NAME)
+ end
+end
diff --git a/db/post_migrate/20240122092139_remove_fk_from_ci_build_trace_metadata_and_ci_job_artifacts.rb b/db/post_migrate/20240122092139_remove_fk_from_ci_build_trace_metadata_and_ci_job_artifacts.rb
new file mode 100644
index 00000000000..dcc3ef56f17
--- /dev/null
+++ b/db/post_migrate/20240122092139_remove_fk_from_ci_build_trace_metadata_and_ci_job_artifacts.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+class RemoveFkFromCiBuildTraceMetadataAndCiJobArtifacts < Gitlab::Database::Migration[2.2]
+ milestone '16.9'
+ disable_ddl_transaction!
+
+ SOURCE_TABLE_NAME = :ci_build_trace_metadata
+ TARGET_TABLE_NAME = :ci_job_artifacts
+ COLUMN = :trace_artifact_id
+ TARGET_COLUMN = :id
+ FK_NAME = :fk_21d25cac1a
+
+ def up
+ with_lock_retries do
+ remove_foreign_key_if_exists(
+ SOURCE_TABLE_NAME,
+ TARGET_TABLE_NAME,
+ name: FK_NAME,
+ reverse_lock_order: true
+ )
+ end
+ end
+
+ def down
+ add_concurrent_foreign_key(
+ SOURCE_TABLE_NAME,
+ TARGET_TABLE_NAME,
+ column: COLUMN,
+ target_column: TARGET_COLUMN,
+ validate: true,
+ reverse_lock_order: true,
+ on_delete: :cascade,
+ name: FK_NAME
+ )
+ end
+end
diff --git a/db/post_migrate/20240122132856_finalize_backfill_partition_id_ci_pipeline_metadata.rb b/db/post_migrate/20240122132856_finalize_backfill_partition_id_ci_pipeline_metadata.rb
new file mode 100644
index 00000000000..fd425ae2998
--- /dev/null
+++ b/db/post_migrate/20240122132856_finalize_backfill_partition_id_ci_pipeline_metadata.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class FinalizeBackfillPartitionIdCiPipelineMetadata < Gitlab::Database::Migration[2.2]
+ milestone '16.9'
+ MIGRATION = 'BackfillPartitionIdCiPipelineMetadata'
+ disable_ddl_transaction!
+ restrict_gitlab_migration gitlab_schema: :gitlab_ci
+
+ def up
+ ensure_batched_background_migration_is_finished(
+ job_class_name: MIGRATION,
+ table_name: :ci_pipeline_metadata,
+ column_name: :pipeline_id,
+ job_arguments: [],
+ finalize: true
+ )
+ end
+
+ def down
+ # no-op
+ end
+end
diff --git a/db/post_migrate/20240122133127_finalize_backfill_partition_id_ci_pipeline_artifact.rb b/db/post_migrate/20240122133127_finalize_backfill_partition_id_ci_pipeline_artifact.rb
new file mode 100644
index 00000000000..e7762c495c9
--- /dev/null
+++ b/db/post_migrate/20240122133127_finalize_backfill_partition_id_ci_pipeline_artifact.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class FinalizeBackfillPartitionIdCiPipelineArtifact < Gitlab::Database::Migration[2.2]
+ milestone '16.9'
+ MIGRATION = 'BackfillPartitionIdCiPipelineArtifact'
+ disable_ddl_transaction!
+ restrict_gitlab_migration gitlab_schema: :gitlab_ci
+
+ def up
+ ensure_batched_background_migration_is_finished(
+ job_class_name: MIGRATION,
+ table_name: :ci_pipeline_artifacts,
+ column_name: :id,
+ job_arguments: [],
+ finalize: true
+ )
+ end
+
+ def down
+ # no-op
+ end
+end
diff --git a/db/post_migrate/20240122133457_finalize_backfill_partition_id_ci_pipeline_config.rb b/db/post_migrate/20240122133457_finalize_backfill_partition_id_ci_pipeline_config.rb
new file mode 100644
index 00000000000..54949795f14
--- /dev/null
+++ b/db/post_migrate/20240122133457_finalize_backfill_partition_id_ci_pipeline_config.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+class FinalizeBackfillPartitionIdCiPipelineConfig < Gitlab::Database::Migration[2.2]
+ milestone '16.9'
+ MIGRATION = 'BackfillPartitionIdCiPipelineConfig'
+ disable_ddl_transaction!
+ restrict_gitlab_migration gitlab_schema: :gitlab_ci
+
+ def up
+ ensure_batched_background_migration_is_finished(
+ job_class_name: MIGRATION,
+ table_name: :ci_pipelines_config,
+ column_name: :pipeline_id,
+ job_arguments: [],
+ finalize: true
+ )
+ end
+
+ def down
+ # no-op
+ end
+end
diff --git a/db/schema_migrations/20240122092018 b/db/schema_migrations/20240122092018
new file mode 100644
index 00000000000..5cea288ffd8
--- /dev/null
+++ b/db/schema_migrations/20240122092018
@@ -0,0 +1 @@
+943cb1f5cd218199a2c4b36f7073d42f6ec7ceace21311e8cbeb6026578db3e5
\ No newline at end of file
diff --git a/db/schema_migrations/20240122092139 b/db/schema_migrations/20240122092139
new file mode 100644
index 00000000000..9738f978160
--- /dev/null
+++ b/db/schema_migrations/20240122092139
@@ -0,0 +1 @@
+934bd0403c967d97ea8404fd14a4b6b6e4654570322d59482301bbb88c534a64
\ No newline at end of file
diff --git a/db/schema_migrations/20240122132856 b/db/schema_migrations/20240122132856
new file mode 100644
index 00000000000..cdc3ab50bd6
--- /dev/null
+++ b/db/schema_migrations/20240122132856
@@ -0,0 +1 @@
+b9ee41a5d7538e307fddad27f9a08396fe1b3a177603b3c6be50eedfcb75dcce
\ No newline at end of file
diff --git a/db/schema_migrations/20240122133127 b/db/schema_migrations/20240122133127
new file mode 100644
index 00000000000..2e3151741e0
--- /dev/null
+++ b/db/schema_migrations/20240122133127
@@ -0,0 +1 @@
+521abad74f8bfddb5a5a639a20ad67bf8f2734de4c908a7a7916168ca11db2b1
\ No newline at end of file
diff --git a/db/schema_migrations/20240122133457 b/db/schema_migrations/20240122133457
new file mode 100644
index 00000000000..b46ce21ae56
--- /dev/null
+++ b/db/schema_migrations/20240122133457
@@ -0,0 +1 @@
+fec1d73c4fec7194df134a546dc0f75d9da3334e7f3d794e7bc18c760e598e83
\ No newline at end of file
diff --git a/db/structure.sql b/db/structure.sql
index bca40bba677..cd9e8be6d35 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -32836,8 +32836,6 @@ CREATE INDEX index_ci_build_trace_chunks_on_partition_id_build_id ON ci_build_tr
CREATE UNIQUE INDEX index_ci_build_trace_metadata_on_partition_id_build_id ON ci_build_trace_metadata USING btree (partition_id, build_id);
-CREATE INDEX index_ci_build_trace_metadata_on_trace_artifact_id ON ci_build_trace_metadata USING btree (trace_artifact_id);
-
CREATE INDEX index_ci_build_trace_metadata_on_trace_artifact_id_partition_id ON ci_build_trace_metadata USING btree (trace_artifact_id, partition_id);
CREATE INDEX p_ci_builds_metadata_build_id_idx ON ONLY p_ci_builds_metadata USING btree (build_id) WHERE (has_exposed_artifacts IS TRUE);
@@ -38241,9 +38239,6 @@ ALTER TABLE ONLY namespace_settings
ADD CONSTRAINT fk_20cf0eb2f9 FOREIGN KEY (default_compliance_framework_id) REFERENCES compliance_management_frameworks(id) ON DELETE SET NULL;
ALTER TABLE ONLY ci_build_trace_metadata
- ADD CONSTRAINT fk_21d25cac1a FOREIGN KEY (trace_artifact_id) REFERENCES ci_job_artifacts(id) ON DELETE CASCADE;
-
-ALTER TABLE ONLY ci_build_trace_metadata
ADD CONSTRAINT fk_21d25cac1a_p FOREIGN KEY (partition_id, trace_artifact_id) REFERENCES ci_job_artifacts(partition_id, id) ON UPDATE CASCADE ON DELETE CASCADE;
ALTER TABLE ONLY users_star_projects
diff --git a/doc/administration/settings/jira_cloud_app.md b/doc/administration/settings/jira_cloud_app.md
index c87c7c62a3a..62a8b85e79b 100644
--- a/doc/administration/settings/jira_cloud_app.md
+++ b/doc/administration/settings/jira_cloud_app.md
@@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# GitLab for Jira Cloud app administration **(FREE SELF)**
NOTE:
-This page contains information about administering the GitLab for Jira Cloud app for self-managed instances. For user documentation, see [GitLab for Jira Cloud app](../../integration/jira/connect-app.md).
+This page contains administrator documentation for the GitLab for Jira Cloud app. For user documentation, see [GitLab for Jira Cloud app](../../integration/jira/connect-app.md).
With the [GitLab for Jira Cloud](https://marketplace.atlassian.com/apps/1221011/gitlab-com-for-jira-cloud?tab=overview&hosting=cloud) app, you can connect GitLab and Jira Cloud to sync development information in real time. You can view this information in the [Jira development panel](../../integration/jira/development_panel.md).
diff --git a/doc/administration/settings/jira_cloud_app_troubleshooting.md b/doc/administration/settings/jira_cloud_app_troubleshooting.md
index cc17c620724..93d85625463 100644
--- a/doc/administration/settings/jira_cloud_app_troubleshooting.md
+++ b/doc/administration/settings/jira_cloud_app_troubleshooting.md
@@ -6,9 +6,9 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Troubleshooting GitLab for Jira Cloud app administration **(FREE SELF)**
-When administering the GitLab for Jira Cloud app for self-managed instances, you might encounter the following issues.
+When administering the GitLab for Jira Cloud app, you might encounter the following issues.
-For GitLab.com, see [GitLab for Jira Cloud app](../../integration/jira/connect-app.md#troubleshooting).
+For user documentation, see [GitLab for Jira Cloud app](../../integration/jira/connect-app.md#troubleshooting).
## Sign-in message displayed when already signed in
diff --git a/doc/administration/settings/project_integration_management.md b/doc/administration/settings/project_integration_management.md
index 7fea1bccaf6..435a8d07c0b 100644
--- a/doc/administration/settings/project_integration_management.md
+++ b/doc/administration/settings/project_integration_management.md
@@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Project integration administration **(FREE SELF)**
NOTE:
-This page contains information about administering project integrations for self-managed instances. For user documentation, see [Project integrations](../../user/project/integrations/index.md).
+This page contains administrator documentation for project integrations. For user documentation, see [Project integrations](../../user/project/integrations/index.md).
Project integrations can be configured and enabled by project administrators. As a GitLab instance
administrator, you can set default configuration parameters for a given integration that all projects
diff --git a/doc/administration/settings/slack_app.md b/doc/administration/settings/slack_app.md
index 48352a74060..5421d80d2ba 100644
--- a/doc/administration/settings/slack_app.md
+++ b/doc/administration/settings/slack_app.md
@@ -6,10 +6,10 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# GitLab for Slack app administration **(FREE SELF)**
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/358872) for self-managed instances in GitLab 16.2.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/358872) for self-managed in GitLab 16.2.
NOTE:
-This page contains information about administering the GitLab for Slack app for self-managed instances. For user documentation, see [GitLab for Slack app](../../user/project/integrations/gitlab_slack_application.md).
+This page contains administrator documentation for the GitLab for Slack app. For user documentation, see [GitLab for Slack app](../../user/project/integrations/gitlab_slack_application.md).
The GitLab for Slack app distributed through the Slack App Directory only works with GitLab.com.
On self-managed GitLab, you can create your own copy of the GitLab for Slack app from a [manifest file](https://api.slack.com/reference/manifests#creating_apps) and configure your instance.
@@ -102,9 +102,9 @@ To enable the GitLab for Slack app functionality, your network must allow inboun
## Troubleshooting
-When administering the GitLab for Slack app for self-managed instances, you might encounter the following issues.
+When administering the GitLab for Slack app, you might encounter the following issues.
-For GitLab.com, see [GitLab for Slack app](../../user/project/integrations/gitlab_slack_app_troubleshooting.md).
+For user documentation, see [GitLab for Slack app](../../user/project/integrations/gitlab_slack_app_troubleshooting.md).
### Slash commands return `dispatch_failed` in Slack
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index 38cbe5a1f85..0f2f16ffc45 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -15523,7 +15523,7 @@ Branch protection details for a branch rule.
### `BranchRule`
-List of branch rules for a project, grouped by branch name.
+Branch rules configured for a rule target.
#### Fields
@@ -15536,7 +15536,7 @@ List of branch rules for a project, grouped by branch name.
| <a id="branchruleisdefault"></a>`isDefault` | [`Boolean!`](#boolean) | Check if this branch rule protects the project's default branch. |
| <a id="branchruleisprotected"></a>`isProtected` | [`Boolean!`](#boolean) | Check if this branch rule protects access for the branch. |
| <a id="branchrulematchingbranchescount"></a>`matchingBranchesCount` | [`Int!`](#int) | Number of existing branches that match this branch rule. |
-| <a id="branchrulename"></a>`name` | [`String!`](#string) | Branch name, with wildcards, for the branch rules. |
+| <a id="branchrulename"></a>`name` | [`String!`](#string) | Name of the branch rule target. Includes wildcards. |
| <a id="branchruleupdatedat"></a>`updatedAt` | [`Time!`](#time) | Timestamp of when the branch rule was last updated. |
### `BurnupChartDailyTotals`
@@ -19415,6 +19415,7 @@ GPG signature for a signed commit.
| <a id="groupparent"></a>`parent` | [`Group`](#group) | Parent group. |
| <a id="grouppath"></a>`path` | [`String!`](#string) | Path of the namespace. |
| <a id="grouppendingmembers"></a>`pendingMembers` **{warning-solid}** | [`PendingGroupMemberConnection`](#pendinggroupmemberconnection) | **Introduced** in 16.6. This feature is an Experiment. It can be changed or removed at any time. A pending membership of a user within this group. |
+| <a id="groupproductanalyticsstoredeventslimit"></a>`productAnalyticsStoredEventsLimit` **{warning-solid}** | [`Int`](#int) | **Introduced** in 16.9. This feature is an Experiment. It can be changed or removed at any time. Number of product analytics events namespace is permitted to store per cycle. |
| <a id="groupprojectcreationlevel"></a>`projectCreationLevel` | [`String`](#string) | Permission level required to create projects in the group. |
| <a id="groupprojectscount"></a>`projectsCount` | [`Int!`](#int) | Count of direct projects in this group. |
| <a id="grouprecentissueboards"></a>`recentIssueBoards` | [`BoardConnection`](#boardconnection) | List of recently visited boards of the group. Maximum size is 4. (see [Connections](#connections)) |
@@ -23193,6 +23194,7 @@ Product analytics events for a specific month and year.
| <a id="namespacename"></a>`name` | [`String!`](#string) | Name of the namespace. |
| <a id="namespacepackagesettings"></a>`packageSettings` | [`PackageSettings`](#packagesettings) | Package settings for the namespace. |
| <a id="namespacepath"></a>`path` | [`String!`](#string) | Path of the namespace. |
+| <a id="namespaceproductanalyticsstoredeventslimit"></a>`productAnalyticsStoredEventsLimit` **{warning-solid}** | [`Int`](#int) | **Introduced** in 16.9. This feature is an Experiment. It can be changed or removed at any time. Number of product analytics events namespace is permitted to store per cycle. |
| <a id="namespacerepositorysizeexcessprojectcount"></a>`repositorySizeExcessProjectCount` | [`Int`](#int) | Number of projects in the root namespace where the repository size exceeds the limit. This only applies to namespaces under Project limit enforcement. |
| <a id="namespacerequestaccessenabled"></a>`requestAccessEnabled` | [`Boolean`](#boolean) | Indicates if users can request access to namespace. |
| <a id="namespacerootstoragestatistics"></a>`rootStorageStatistics` | [`RootStorageStatistics`](#rootstoragestatistics) | Aggregated storage statistics of the namespace. Only available for root namespaces. |
diff --git a/doc/ci/runners/saas/macos_saas_runner.md b/doc/ci/runners/saas/macos_saas_runner.md
index d48e542699e..dcf81158e82 100644
--- a/doc/ci/runners/saas/macos_saas_runner.md
+++ b/doc/ci/runners/saas/macos_saas_runner.md
@@ -37,7 +37,7 @@ in your `.gitlab-ci.yml` file. Each image runs a specific version of macOS and X
| VM image | Status | |
|----------------------------|--------|--------------|
-| `macos-12-xcode-14` | `GA` | |
+| `macos-12-xcode-14` | `Deprecated` | (Removal in GitLab 16.10) |
| `macos-13-xcode-14` | `GA` | [Preinstalled Software](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/job-images/-/blob/main/toolchain/macos-13.yml) |
| `macos-14-xcode-15` | `GA` | [Preinstalled Software](https://gitlab.com/gitlab-org/ci-cd/shared-runners/images/job-images/-/blob/main/toolchain/macos-14.yml) |
@@ -60,7 +60,7 @@ The following sample `.gitlab-ci.yml` file shows how to start using the SaaS run
.macos_saas_runners:
tags:
- saas-macos-medium-m1
- image: macos-12-xcode-14
+ image: macos-14-xcode-15
before_script:
- echo "started by ${GITLAB_USER_NAME}"
diff --git a/doc/development/sidekiq/worker_attributes.md b/doc/development/sidekiq/worker_attributes.md
index 072f34f0c96..beabe1fba03 100644
--- a/doc/development/sidekiq/worker_attributes.md
+++ b/doc/development/sidekiq/worker_attributes.md
@@ -406,3 +406,18 @@ class LimitedWorker
# ...
end
```
+
+## Skip execution of workers in Geo secondary
+
+On Geo secondary sites, database writes are disabled.
+You must skip execution of workers that attempt database writes from Geo secondary sites.
+To skip execution, prepend the `::Geo::SkipSecondary` module to the worker class.
+
+```ruby
+class DummyWorker
+ include ApplicationWorker
+ prepend ::Geo::SkipSecondary
+
+ # ...
+end
+```
diff --git a/doc/integration/jira/connect-app.md b/doc/integration/jira/connect-app.md
index 2dac6dd5cf5..5056826b1d7 100644
--- a/doc/integration/jira/connect-app.md
+++ b/doc/integration/jira/connect-app.md
@@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# GitLab for Jira Cloud app **(FREE ALL)**
NOTE:
-This page contains information about configuring the GitLab for Jira Cloud app on GitLab.com. For administrator documentation, see [GitLab for Jira Cloud app administration](../../administration/settings/jira_cloud_app.md).
+This page contains user documentation for the GitLab for Jira Cloud app. For administrator documentation, see [GitLab for Jira Cloud app administration](../../administration/settings/jira_cloud_app.md).
With the [GitLab for Jira Cloud](https://marketplace.atlassian.com/apps/1221011/gitlab-com-for-jira-cloud?tab=overview&hosting=cloud) app, you can connect GitLab and Jira Cloud to sync development information in real time. You can view this information in the [Jira development panel](development_panel.md).
@@ -100,9 +100,9 @@ and the access token is stored encrypted with `AES256-GCM` on GitLab.
## Troubleshooting
-When configuring the GitLab for Jira Cloud app on GitLab.com, you might encounter the following issues.
+When working with the GitLab for Jira Cloud app, you might encounter the following issues.
-For self-managed GitLab, see [GitLab for Jira Cloud app administration](../../administration/settings/jira_cloud_app_troubleshooting.md).
+For administrator documentation, see [GitLab for Jira Cloud app administration](../../administration/settings/jira_cloud_app_troubleshooting.md).
### Error when connecting the app
diff --git a/doc/user/group/epics/epic_boards.md b/doc/user/group/epics/epic_boards.md
index 0547f947419..62d410bdc5a 100644
--- a/doc/user/group/epics/epic_boards.md
+++ b/doc/user/group/epics/epic_boards.md
@@ -181,7 +181,7 @@ Prerequisites:
To move an epic to the start of the list:
1. In an epic board, hover over the card of the epic you want to move.
-1. Select the vertical ellipsis (**{ellipsis_v}**), then **Move to start of list**.
+1. Select **Card options** (**{ellipsis_v}**), then **Move to start of list**.
#### Move an epic to the end of the list
@@ -199,7 +199,7 @@ Prerequisites:
To move an epic to the end of the list:
1. In an epic board, hover over the card of the epic you want to move.
-1. Select the vertical ellipsis (**{ellipsis_v}**), then **Move to end of list**.
+1. Select **Card options** (**{ellipsis_v}**), then **Move to end of list**.
#### Dragging epics between lists
diff --git a/doc/user/project/integrations/gitlab_slack_app_troubleshooting.md b/doc/user/project/integrations/gitlab_slack_app_troubleshooting.md
index 5a0e3458107..a219a9ffdd4 100644
--- a/doc/user/project/integrations/gitlab_slack_app_troubleshooting.md
+++ b/doc/user/project/integrations/gitlab_slack_app_troubleshooting.md
@@ -6,9 +6,9 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Troubleshooting GitLab for Slack app **(FREE ALL)**
-When configuring the GitLab for Slack app on GitLab.com, you might encounter the following issues.
+When working with the GitLab for Slack app, you might encounter the following issues.
-For self-managed GitLab, see [GitLab for Slack app administration](../../../administration/settings/slack_app.md#troubleshooting).
+For administrator documentation, see [GitLab for Slack app administration](../../../administration/settings/slack_app.md#troubleshooting).
## App does not appear in the list of integrations
diff --git a/doc/user/project/integrations/gitlab_slack_application.md b/doc/user/project/integrations/gitlab_slack_application.md
index 6b80edd1a05..b13cc3f686a 100644
--- a/doc/user/project/integrations/gitlab_slack_application.md
+++ b/doc/user/project/integrations/gitlab_slack_application.md
@@ -6,10 +6,10 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# GitLab for Slack app **(FREE ALL)**
-> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/358872) for self-managed instances in GitLab 16.2.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/358872) for self-managed in GitLab 16.2.
NOTE:
-This page contains information about configuring the GitLab for Slack app on GitLab.com. For administrator documentation, see [GitLab for Slack app administration](../../../administration/settings/slack_app.md).
+This page contains user documentation for the GitLab for Slack app. For administrator documentation, see [GitLab for Slack app administration](../../../administration/settings/slack_app.md).
The GitLab for Slack app is a native Slack app that provides [slash commands](#slash-commands) and [notifications](#slack-notifications)
in your Slack workspace. GitLab links your Slack user with your GitLab user so that any command
diff --git a/doc/user/project/integrations/index.md b/doc/user/project/integrations/index.md
index bf1d0826e11..42be43bba64 100644
--- a/doc/user/project/integrations/index.md
+++ b/doc/user/project/integrations/index.md
@@ -7,7 +7,7 @@ info: To determine the technical writer assigned to the Stage/Group associated w
# Project integrations **(FREE ALL)**
NOTE:
-This page contains information about configuring project integrations on GitLab.com. For administrator documentation, see [Project integration administration](../../../administration/settings/project_integration_management.md).
+This page contains user documentation for project integrations. For administrator documentation, see [Project integration administration](../../../administration/settings/project_integration_management.md).
You can integrate with external applications to add functionality to GitLab.
diff --git a/doc/user/project/issue_board.md b/doc/user/project/issue_board.md
index 39353480908..2af657fd9b9 100644
--- a/doc/user/project/issue_board.md
+++ b/doc/user/project/issue_board.md
@@ -602,7 +602,7 @@ Prerequisites:
To move an issue to the start of the list:
1. In an issue board, hover over the card of the issue you want to move.
-1. Select the vertical ellipsis (**{ellipsis_v}**), then **Move to start of list**.
+1. Select **Card options** (**{ellipsis_v}**), then **Move to start of list**.
#### Move an issue to the end of the list
@@ -619,7 +619,7 @@ Prerequisites:
To move an issue to the end of the list:
1. In an issue board, hover over the card of the issue you want to move.
-1. Select the vertical ellipsis (**{ellipsis_v}**), then **Move to end of list**.
+1. Select **Card options** (**{ellipsis_v}**), then **Move to end of list**.
#### Dragging issues between lists
diff --git a/doc/user/project/merge_requests/cherry_pick_changes.md b/doc/user/project/merge_requests/cherry_pick_changes.md
index 3d3d302856f..7ac80c81911 100644
--- a/doc/user/project/merge_requests/cherry_pick_changes.md
+++ b/doc/user/project/merge_requests/cherry_pick_changes.md
@@ -56,9 +56,12 @@ Prerequisites:
- You must have a role in the project that allows you to edit merge requests, and add
code to the repository.
- Your project must use the [merge method](methods/index.md#fast-forward-merge) **Merge Commit**,
- which is set in the project's **Settings > Merge requests**. Fast-forwarded commits
- can't be cherry-picked from the GitLab UI, but the individual commits can
- [still be cherry-picked](#cherry-pick-a-single-commit).
+ which is set in the project's **Settings > Merge requests**.
+
+ [In GitLab 16.9 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/142152), fast-forwarded
+ commits can be cherry-picked from the GitLab UI only when they are squashed or when the
+ merge request contains a single commit.
+ You can always [cherry-pick individual commits](#cherry-pick-a-single-commit).
To do this:
diff --git a/doc/user/project/merge_requests/revert_changes.md b/doc/user/project/merge_requests/revert_changes.md
index 72bc82bbf0f..96bf071cf02 100644
--- a/doc/user/project/merge_requests/revert_changes.md
+++ b/doc/user/project/merge_requests/revert_changes.md
@@ -25,8 +25,11 @@ Prerequisites:
- You must have a role in the project that allows you to edit merge requests, and add
code to the repository.
- Your project must use the [merge method](methods/index.md#fast-forward-merge) **Merge Commit**,
- which is set in the project's **Settings > Merge requests**. You can't revert
- fast-forwarded commits from the GitLab UI.
+ which is set in the project's **Settings > Merge requests**.
+
+ [In GitLab 16.9 and later](https://gitlab.com/gitlab-org/gitlab/-/issues/142152), you can revert
+ fast-forwarded commits from the GitLab UI only when they are squashed or when the
+ merge request contains a single commit.
To do this:
diff --git a/lib/backup/database.rb b/lib/backup/database.rb
deleted file mode 100644
index 962214407d9..00000000000
--- a/lib/backup/database.rb
+++ /dev/null
@@ -1,277 +0,0 @@
-# frozen_string_literal: true
-
-require 'yaml'
-
-module Backup
- class Database < Task
- extend ::Gitlab::Utils::Override
- include Backup::Helper
- attr_reader :force
-
- IGNORED_ERRORS = [
- # Ignore warnings
- /WARNING:/,
- # Ignore the DROP errors; recent database dumps will use --if-exists with pg_dump
- /does not exist$/,
- # User may not have permissions to drop extensions or schemas
- /must be owner of/
- ].freeze
- IGNORED_ERRORS_REGEXP = Regexp.union(IGNORED_ERRORS).freeze
-
- def initialize(progress, options:, force:)
- super(progress, options: options)
- @force = force
- end
-
- override :dump
- def dump(destination_dir, _)
- FileUtils.mkdir_p(destination_dir)
-
- each_database(destination_dir) do |backup_connection|
- pg_env = backup_connection.database_configuration.pg_env_variables
- active_record_config = backup_connection.database_configuration.activerecord_variables
- pg_database_name = active_record_config[:database]
-
- dump_file_name = file_name(destination_dir, backup_connection.connection_name)
- FileUtils.rm_f(dump_file_name)
-
- progress.print "Dumping PostgreSQL database #{pg_database_name} ... "
-
- schemas = []
-
- if Gitlab.config.backup.pg_schema
- schemas << Gitlab.config.backup.pg_schema
- schemas.push(*Gitlab::Database::EXTRA_SCHEMAS.map(&:to_s))
- end
-
- pg_dump = ::Gitlab::Backup::Cli::Utils::PgDump.new(
- database_name: pg_database_name,
- snapshot_id: backup_connection.snapshot_id,
- schemas: schemas,
- env: pg_env)
-
- success = Backup::Dump::Postgres.new.dump(dump_file_name, pg_dump)
-
- backup_connection.release_snapshot! if backup_connection.snapshot_id
-
- raise DatabaseBackupError.new(active_record_config, dump_file_name) unless success
-
- report_success(success)
- progress.flush
- end
- ensure
- ::Gitlab::Database::EachDatabase.each_connection(
- only: base_models_for_backup.keys, include_shared: false
- ) do |connection, _|
- Gitlab::Database::TransactionTimeoutSettings.new(connection).restore_timeouts
- end
- end
-
- override :restore
- def restore(destination_dir, backup_id)
- base_models_for_backup.each do |database_name, _|
- backup_connection = Backup::DatabaseConnection.new(database_name)
-
- config = backup_connection.database_configuration.activerecord_variables
-
- db_file_name = file_name(destination_dir, database_name)
- database = config[:database]
-
- unless File.exist?(db_file_name)
- raise(Backup::Error, "Source database file does not exist #{db_file_name}") if main_database?(database_name)
-
- progress.puts "Source backup for the database #{database_name} doesn't exist. Skipping the task"
- return false
- end
-
- unless force
- progress.puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
- sleep(5)
- end
-
- # Drop all tables Load the schema to ensure we don't have any newer tables
- # hanging out from a failed upgrade
- drop_tables(database_name)
-
- pg_env = backup_connection.database_configuration.pg_env_variables
- success = with_transient_pg_env(pg_env) do
- decompress_rd, decompress_wr = IO.pipe
- decompress_pid = spawn(decompress_cmd, out: decompress_wr, in: db_file_name)
- decompress_wr.close
-
- status, @errors =
- case config[:adapter]
- when "postgresql" then
- progress.print "Restoring PostgreSQL database #{database} ... "
- execute_and_track_errors(pg_restore_cmd(database), decompress_rd)
- end
- decompress_rd.close
-
- Process.waitpid(decompress_pid)
- $?.success? && status.success?
- end
-
- if @errors.present?
- progress.print "------ BEGIN ERRORS -----\n".color(:yellow)
- progress.print @errors.join.color(:yellow)
- progress.print "------ END ERRORS -------\n".color(:yellow)
- end
-
- report_success(success)
- raise Backup::Error, 'Restore failed' unless success
- end
- end
-
- override :pre_restore_warning
- def pre_restore_warning
- return if force
-
- <<-MSG.strip_heredoc
- Be sure to stop Puma, Sidekiq, and any other process that
- connects to the database before proceeding. For Omnibus
- installs, see the following link for more information:
- https://docs.gitlab.com/ee/raketasks/backup_restore.html#restore-for-omnibus-gitlab-installations
-
- Before restoring the database, we will remove all existing
- tables to avoid future upgrade problems. Be aware that if you have
- custom tables in the GitLab database these tables and all data will be
- removed.
- MSG
- end
-
- override :post_restore_warning
- def post_restore_warning
- return unless @errors.present?
-
- <<-MSG.strip_heredoc
- There were errors in restoring the schema. This may cause
- issues if this results in missing indexes, constraints, or
- columns. Please record the errors above and contact GitLab
- Support if you have questions:
- https://about.gitlab.com/support/
- MSG
- end
-
- protected
-
- def base_models_for_backup
- @base_models_for_backup ||= Gitlab::Database.database_base_models_with_gitlab_shared
- end
-
- def main_database?(database_name)
- database_name.to_sym == :main
- end
-
- def file_name(base_dir, database_name)
- prefix = if database_name.to_sym != :main
- "#{database_name}_"
- else
- ''
- end
-
- File.join(base_dir, "#{prefix}database.sql.gz")
- end
-
- def ignore_error?(line)
- IGNORED_ERRORS_REGEXP.match?(line)
- end
-
- def execute_and_track_errors(cmd, decompress_rd)
- errors = []
-
- Open3.popen3(ENV, *cmd) do |stdin, stdout, stderr, thread|
- stdin.binmode
-
- out_reader = Thread.new do
- data = stdout.read
- $stdout.write(data)
- end
-
- err_reader = Thread.new do
- until (raw_line = stderr.gets).nil?
- warn(raw_line)
- errors << raw_line unless ignore_error?(raw_line)
- end
- end
-
- begin
- IO.copy_stream(decompress_rd, stdin)
- rescue Errno::EPIPE
- end
-
- stdin.close
- [thread, out_reader, err_reader].each(&:join)
- [thread.value, errors]
- end
- end
-
- def report_success(success)
- if success
- progress.puts '[DONE]'.color(:green)
- else
- progress.puts '[FAILED]'.color(:red)
- end
- end
-
- private
-
- def drop_tables(database_name)
- puts_time 'Cleaning the database ... '.color(:blue)
-
- if Rake::Task.task_defined? "gitlab:db:drop_tables:#{database_name}"
- Rake::Task["gitlab:db:drop_tables:#{database_name}"].invoke
- else
- # In single database (single or two connections)
- Rake::Task["gitlab:db:drop_tables"].invoke
- end
-
- puts_time 'done'.color(:green)
- end
-
- # @deprecated This will be removed when restore operation is refactored to use extended_env directly
- def with_transient_pg_env(extended_env)
- ENV.merge!(extended_env)
- result = yield
- ENV.reject! { |k, _| extended_env.key?(k) }
-
- result
- end
-
- def pg_restore_cmd(database)
- ['psql', database]
- end
-
- def each_database(destination_dir, &block)
- databases = []
-
- # each connection will loop through all database connections defined in `database.yml`
- # and reject the ones that are shared, so we don't get duplicates
- #
- # we consider a connection to be shared when it has `database_tasks: false`
- ::Gitlab::Database::EachDatabase.each_connection(
- only: base_models_for_backup.keys, include_shared: false
- ) do |_, database_connection_name|
- backup_connection = Backup::DatabaseConnection.new(database_connection_name)
- databases << backup_connection
-
- next unless multiple_databases?
-
- begin
- # Trigger a transaction snapshot export that will be used by pg_dump later on
- backup_connection.export_snapshot!
- rescue ActiveRecord::ConnectionNotEstablished
- raise Backup::DatabaseBackupError.new(
- backup_connection.database_configuration.activerecord_variables,
- file_name(destination_dir, database_connection_name)
- )
- end
- end
-
- databases.each(&block)
- end
-
- def multiple_databases?
- Gitlab::Database.database_mode == Gitlab::Database::MODE_MULTIPLE_DATABASES
- end
- end
-end
diff --git a/lib/backup/files.rb b/lib/backup/files.rb
deleted file mode 100644
index adf9a081ad5..00000000000
--- a/lib/backup/files.rb
+++ /dev/null
@@ -1,163 +0,0 @@
-# frozen_string_literal: true
-
-require 'open3'
-
-module Backup
- class Files < Task
- extend ::Gitlab::Utils::Override
- include Backup::Helper
-
- DEFAULT_EXCLUDE = 'lost+found'
-
- attr_reader :excludes
-
- def initialize(progress, app_files_dir, options:, excludes: [])
- super(progress, options: options)
-
- @app_files_dir = app_files_dir
- @excludes = [DEFAULT_EXCLUDE].concat(excludes)
- end
-
- # Copy files from public/files to backup/files
- override :dump
- def dump(backup_tarball, backup_id)
- FileUtils.mkdir_p(Gitlab.config.backup.path)
- FileUtils.rm_f(backup_tarball)
-
- if ENV['STRATEGY'] == 'copy'
- cmd = [%w[rsync -a --delete], exclude_dirs(:rsync), %W[#{app_files_realpath} #{Gitlab.config.backup.path}]].flatten
- output, status = Gitlab::Popen.popen(cmd)
-
- # Retry if rsync source files vanish
- if status == 24
- $stdout.puts "Warning: files vanished during rsync, retrying..."
- output, status = Gitlab::Popen.popen(cmd)
- end
-
- unless status == 0
- puts output
- raise_custom_error(backup_tarball)
- end
-
- tar_cmd = [tar, exclude_dirs(:tar), %W[-C #{backup_files_realpath} -cf - .]].flatten
- status_list, output = run_pipeline!([tar_cmd, compress_cmd], out: [backup_tarball, 'w', 0600])
- FileUtils.rm_rf(backup_files_realpath)
- else
- tar_cmd = [tar, exclude_dirs(:tar), %W[-C #{app_files_realpath} -cf - .]].flatten
- status_list, output = run_pipeline!([tar_cmd, compress_cmd], out: [backup_tarball, 'w', 0600])
- end
-
- unless pipeline_succeeded?(tar_status: status_list[0], compress_status: status_list[1], output: output)
- raise_custom_error(backup_tarball)
- end
- end
-
- override :restore
- def restore(backup_tarball, backup_id)
- backup_existing_files_dir(backup_tarball)
-
- cmd_list = [decompress_cmd, %W[#{tar} --unlink-first --recursive-unlink -C #{app_files_realpath} -xf -]]
- status_list, output = run_pipeline!(cmd_list, in: backup_tarball)
- unless pipeline_succeeded?(compress_status: status_list[0], tar_status: status_list[1], output: output)
- raise Backup::Error, "Restore operation failed: #{output}"
- end
- end
-
- def tar
- if system(*%w[gtar --version], out: '/dev/null')
- # It looks like we can get GNU tar by running 'gtar'
- 'gtar'
- else
- 'tar'
- end
- end
-
- def backup_existing_files_dir(backup_tarball)
- name = File.basename(backup_tarball, '.tar.gz')
-
- timestamped_files_path = File.join(Gitlab.config.backup.path, "tmp", "#{name}.#{Time.now.to_i}")
- if File.exist?(app_files_realpath)
- # Move all files in the existing repos directory except . and .. to
- # repositories.<timestamp> directory
- FileUtils.mkdir_p(timestamped_files_path, mode: 0700)
- files = Dir.glob(File.join(app_files_realpath, "*"), File::FNM_DOTMATCH) - [File.join(app_files_realpath, "."), File.join(app_files_realpath, "..")]
- begin
- FileUtils.mv(files, timestamped_files_path)
- rescue Errno::EACCES
- access_denied_error(app_files_realpath)
- rescue Errno::EBUSY
- resource_busy_error(app_files_realpath)
- end
- end
- end
-
- def run_pipeline!(cmd_list, options = {})
- err_r, err_w = IO.pipe
- options[:err] = err_w
- status_list = Open3.pipeline(*cmd_list, options)
- err_w.close
-
- [status_list, err_r.read]
- end
-
- def noncritical_warning?(warning)
- noncritical_warnings = [
- /^g?tar: \.: Cannot mkdir: No such file or directory$/
- ]
-
- noncritical_warnings.map { |w| warning =~ w }.any?
- end
-
- def pipeline_succeeded?(tar_status:, compress_status:, output:)
- return false unless compress_status&.success?
-
- tar_status&.success? || tar_ignore_non_success?(tar_status.exitstatus, output)
- end
-
- def tar_ignore_non_success?(exitstatus, output)
- # tar can exit with nonzero code:
- # 1 - if some files changed (i.e. a CI job is currently writes to log)
- # 2 - if it cannot create `.` directory (see issue https://gitlab.com/gitlab-org/gitlab/-/issues/22442)
- # http://www.gnu.org/software/tar/manual/html_section/tar_19.html#Synopsis
- # so check tar status 1 or stderr output against some non-critical warnings
- if exitstatus == 1
- $stdout.puts "Ignoring tar exit status 1 'Some files differ': #{output}"
- return true
- end
-
- # allow tar to fail with other non-success status if output contain non-critical warning
- if noncritical_warning?(output)
- $stdout.puts "Ignoring non-success exit status #{exitstatus} due to output of non-critical warning(s): #{output}"
- return true
- end
-
- false
- end
-
- def exclude_dirs(fmt)
- excludes.map do |s|
- if s == DEFAULT_EXCLUDE
- '--exclude=' + s
- elsif fmt == :rsync
- '--exclude=/' + File.join(File.basename(app_files_realpath), s)
- elsif fmt == :tar
- '--exclude=./' + s
- end
- end
- end
-
- def raise_custom_error(backup_tarball)
- raise FileBackupError.new(app_files_realpath, backup_tarball)
- end
-
- private
-
- def app_files_realpath
- @app_files_realpath ||= File.realpath(@app_files_dir)
- end
-
- def backup_files_realpath
- @backup_files_realpath ||= File.join(Gitlab.config.backup.path, File.basename(@app_files_dir))
- end
- end
-end
diff --git a/lib/backup/manager.rb b/lib/backup/manager.rb
index a53b82c63b7..fdf4b6eea12 100644
--- a/lib/backup/manager.rb
+++ b/lib/backup/manager.rb
@@ -5,24 +5,6 @@ module Backup
FILE_NAME_SUFFIX = '_gitlab_backup.tar'
MANIFEST_NAME = 'backup_information.yml'
- # pages used to deploy tmp files to this path
- # if some of these files are still there, we don't need them in the backup
- LEGACY_PAGES_TMP_PATH = '@pages.tmp'
-
- TaskDefinition = Struct.new(
- :enabled, # `true` if the task can be used. Treated as `true` when not specified.
- :human_name, # Name of the task used for logging.
- :destination_path, # Where the task should put its backup file/dir.
- :destination_optional, # `true` if the destination might not exist on a successful backup.
- :cleanup_path, # Path to remove after a successful backup. Uses `destination_path` when not specified.
- :task,
- keyword_init: true
- ) do
- def enabled?
- enabled.nil? || enabled
- end
- end
-
attr_reader :progress, :remote_storage, :options
def initialize(progress, definitions: nil)
@@ -58,13 +40,13 @@ module Backup
return
end
- if skipped?(task_name)
+ if options.skip_task?(task_name)
puts_time "Dumping #{definition.human_name} ... ".color(:blue) + "[SKIPPED]".color(:cyan)
return
end
puts_time "Dumping #{definition.human_name} ... ".color(:blue)
- definition.task.dump(destination_dir, backup_id)
+ definition.target.dump(destination_dir, backup_id)
puts_time "Dumping #{definition.human_name} ... ".color(:blue) + "done".color(:green)
rescue Backup::DatabaseBackupError, Backup::FileBackupError => e
@@ -92,17 +74,17 @@ module Backup
puts_time "Restoring #{definition.human_name} ... ".color(:blue)
- warning = definition.task.pre_restore_warning
+ warning = definition.target.pre_restore_warning
if warning.present?
puts_time warning.color(:red)
Gitlab::TaskHelpers.ask_to_continue
end
- definition.task.restore(File.join(Gitlab.config.backup.path, definition.destination_path), backup_id)
+ definition.target.restore(File.join(Gitlab.config.backup.path, definition.destination_path), backup_id)
puts_time "Restoring #{definition.human_name} ... ".color(:blue) + "done".color(:green)
- warning = definition.task.post_restore_warning
+ warning = definition.target.post_restore_warning
if warning.present?
puts_time warning.color(:red)
Gitlab::TaskHelpers.ask_to_continue
@@ -116,97 +98,22 @@ module Backup
private
def definitions
- @definitions ||= build_definitions
- end
-
- def build_definitions # rubocop:disable Metrics/AbcSize
- {
- 'db' => TaskDefinition.new(
- human_name: _('database'),
- destination_path: 'db',
- cleanup_path: 'db',
- task: build_db_task
- ),
- 'repositories' => TaskDefinition.new(
- human_name: _('repositories'),
- destination_path: 'repositories',
- destination_optional: true,
- task: build_repositories_task
- ),
- 'uploads' => TaskDefinition.new(
- human_name: _('uploads'),
- destination_path: 'uploads.tar.gz',
- task: build_files_task(File.join(Gitlab.config.uploads.storage_path, 'uploads'), excludes: ['tmp'])
- ),
- 'builds' => TaskDefinition.new(
- human_name: _('builds'),
- destination_path: 'builds.tar.gz',
- task: build_files_task(Settings.gitlab_ci.builds_path)
- ),
- 'artifacts' => TaskDefinition.new(
- human_name: _('artifacts'),
- destination_path: 'artifacts.tar.gz',
- task: build_files_task(JobArtifactUploader.root, excludes: ['tmp'])
- ),
- 'pages' => TaskDefinition.new(
- human_name: _('pages'),
- destination_path: 'pages.tar.gz',
- task: build_files_task(Gitlab.config.pages.path, excludes: [LEGACY_PAGES_TMP_PATH])
- ),
- 'lfs' => TaskDefinition.new(
- human_name: _('lfs objects'),
- destination_path: 'lfs.tar.gz',
- task: build_files_task(Settings.lfs.storage_path)
- ),
- 'terraform_state' => TaskDefinition.new(
- human_name: _('terraform states'),
- destination_path: 'terraform_state.tar.gz',
- task: build_files_task(Settings.terraform_state.storage_path, excludes: ['tmp'])
- ),
- 'registry' => TaskDefinition.new(
- enabled: Gitlab.config.registry.enabled,
- human_name: _('container registry images'),
- destination_path: 'registry.tar.gz',
- task: build_files_task(Settings.registry.path)
- ),
- 'packages' => TaskDefinition.new(
- human_name: _('packages'),
- destination_path: 'packages.tar.gz',
- task: build_files_task(Settings.packages.storage_path, excludes: ['tmp'])
- ),
- 'ci_secure_files' => TaskDefinition.new(
- human_name: _('ci secure files'),
- destination_path: 'ci_secure_files.tar.gz',
- task: build_files_task(Settings.ci_secure_files.storage_path, excludes: ['tmp'])
- )
+ @definitions ||= {
+ Backup::Tasks::Database.id => Backup::Tasks::Database.new(progress: progress, options: options),
+ Backup::Tasks::Repositories.id => Backup::Tasks::Repositories.new(progress: progress, options: options,
+ server_side: backup_information[:repositories_server_side]),
+ Backup::Tasks::Uploads.id => Backup::Tasks::Uploads.new(progress: progress, options: options),
+ Backup::Tasks::Builds.id => Backup::Tasks::Builds.new(progress: progress, options: options),
+ Backup::Tasks::Artifacts.id => Backup::Tasks::Artifacts.new(progress: progress, options: options),
+ Backup::Tasks::Pages.id => Backup::Tasks::Pages.new(progress: progress, options: options),
+ Backup::Tasks::Lfs.id => Backup::Tasks::Lfs.new(progress: progress, options: options),
+ Backup::Tasks::TerraformState.id => Backup::Tasks::TerraformState.new(progress: progress, options: options),
+ Backup::Tasks::Registry.id => Backup::Tasks::Registry.new(progress: progress, options: options),
+ Backup::Tasks::Packages.id => Backup::Tasks::Packages.new(progress: progress, options: options),
+ Backup::Tasks::CiSecureFiles.id => Backup::Tasks::CiSecureFiles.new(progress: progress, options: options)
}.freeze
end
- def build_db_task
- Database.new(progress, options: options, force: options.force?)
- end
-
- def build_repositories_task
- strategy = Backup::GitalyBackup.new(progress,
- incremental: options.incremental?,
- max_parallelism: options.max_parallelism,
- storage_parallelism: options.max_storage_parallelism,
- server_side: backup_information[:repositories_server_side]
- )
-
- Repositories.new(progress,
- strategy: strategy,
- options: options,
- storages: options.repositories_storages,
- paths: options.repositories_paths,
- skip_paths: options.skip_repositories_paths
- )
- end
-
- def build_files_task(app_files_dir, excludes: [])
- Files.new(progress, app_files_dir, options: options, excludes: excludes)
- end
-
def run_all_create_tasks
if options.incremental?
read_backup_information
@@ -237,8 +144,8 @@ module Backup
read_backup_information
verify_backup_version
- definitions.each_key do |task_name|
- if !skipped?(task_name) && enabled_task?(task_name)
+ definitions.each do |task_name, definition|
+ if !options.skip_task?(task_name) && definition.enabled?
run_restore_task(task_name)
end
end
@@ -476,14 +383,6 @@ module Backup
tar_version.dup.force_encoding('locale').split("\n").first
end
- def skipped?(item)
- options.skippable_tasks[item]
- end
-
- def enabled_task?(task_name)
- definitions[task_name].enabled?
- end
-
def backup_file?(file)
file.match(/^(\d{10})(?:_\d{4}_\d{2}_\d{2}(_\d+\.\d+\.\d+((-|\.)(pre|rc\d))?(-ee)?)?)?_gitlab_backup\.tar$/)
end
@@ -510,7 +409,7 @@ module Backup
def backup_contents
[MANIFEST_NAME] + definitions.reject do |name, definition|
- skipped?(name) || !enabled_task?(name) ||
+ options.skip_task?(name) || !definition.enabled? ||
(definition.destination_optional && !File.exist?(File.join(backup_path, definition.destination_path)))
end.values.map(&:destination_path)
end
diff --git a/lib/backup/options.rb b/lib/backup/options.rb
index f0747594fe3..599ba56baf5 100644
--- a/lib/backup/options.rb
+++ b/lib/backup/options.rb
@@ -242,6 +242,10 @@ module Backup
extract_skippable_tasks(list)
end
+ def skip_task?(task_name)
+ !!skippable_tasks[task_name]
+ end
+
private
def extract_skippable_operations!(list)
diff --git a/lib/backup/repositories.rb b/lib/backup/repositories.rb
deleted file mode 100644
index 6d8d5272bfa..00000000000
--- a/lib/backup/repositories.rb
+++ /dev/null
@@ -1,138 +0,0 @@
-# frozen_string_literal: true
-
-require 'yaml'
-
-module Backup
- # Backup and restores repositories by querying the database
- class Repositories < Task
- extend ::Gitlab::Utils::Override
-
- # @param [IO] progress IO interface to output progress
- # @param [Object] :strategy Fetches backups from gitaly
- # @param [Array<String>] :storages Filter by specified storage names. Empty means all storages.
- # @param [Array<String>] :paths Filter by specified project paths. Empty means all projects, groups, and snippets.
- # @param [Array<String>] :skip_paths Skip specified project paths. Empty means all projects, groups, and snippets.
- def initialize(progress, strategy:, options:, storages: [], paths: [], skip_paths: [])
- super(progress, options: options)
-
- @strategy = strategy
- @storages = storages
- @paths = paths
- @skip_paths = skip_paths
- end
-
- override :dump
- def dump(destination_path, backup_id)
- strategy.start(:create, destination_path, backup_id: backup_id)
- enqueue_consecutive
-
- ensure
- strategy.finish!
- end
-
- override :restore
- def restore(destination_path, backup_id)
- strategy.start(:restore, destination_path, remove_all_repositories: remove_all_repositories, backup_id: backup_id)
- enqueue_consecutive
-
- ensure
- strategy.finish!
-
- restore_object_pools
- end
-
- private
-
- attr_reader :strategy, :storages, :paths, :skip_paths
-
- def remove_all_repositories
- return if paths.present?
-
- storages.presence || Gitlab.config.repositories.storages.keys
- end
-
- def enqueue_consecutive
- enqueue_consecutive_projects
- enqueue_consecutive_snippets
- end
-
- def enqueue_consecutive_projects
- project_relation.find_each(batch_size: 1000) do |project|
- enqueue_project(project)
- end
- end
-
- def enqueue_consecutive_snippets
- snippet_relation.find_each(batch_size: 1000) { |snippet| enqueue_snippet(snippet) }
- end
-
- def enqueue_project(project)
- strategy.enqueue(project, Gitlab::GlRepository::PROJECT)
- strategy.enqueue(project, Gitlab::GlRepository::WIKI)
-
- return unless project.design_management_repository
-
- strategy.enqueue(project.design_management_repository, Gitlab::GlRepository::DESIGN)
- end
-
- def enqueue_snippet(snippet)
- strategy.enqueue(snippet, Gitlab::GlRepository::SNIPPET)
- end
-
- def project_relation
- scope = Project.includes(:route, :group, :namespace)
- scope = scope.id_in(ProjectRepository.for_repository_storage(storages).select(:project_id)) if storages.any?
- if paths.any?
- scope = scope.where_full_path_in(paths).or(
- Project.where(namespace_id: Namespace.where_full_path_in(paths).self_and_descendants)
- )
- end
-
- scope = scope.and(skipped_path_relation) if skip_paths.any?
- scope
- end
-
- def snippet_relation
- scope = Snippet.all
- scope = scope.id_in(SnippetRepository.for_repository_storage(storages).select(:snippet_id)) if storages.any?
- if paths.any?
- scope = scope.joins(:project).merge(
- Project.where_full_path_in(paths).or(
- Project.where(namespace_id: Namespace.where_full_path_in(paths).self_and_descendants)
- )
- )
- end
-
- if skip_paths.any?
- scope = scope.where(project: skipped_path_relation)
- scope = scope.or(Snippet.where(project: nil)) if !paths.any? && !storages.any?
- end
-
- scope
- end
-
- def skipped_path_relation
- Project.where.not(id: Project.where_full_path_in(skip_paths).or(
- Project.where(namespace_id: Namespace.where_full_path_in(skip_paths).self_and_descendants)
- ))
- end
-
- def restore_object_pools
- PoolRepository.includes(:source_project).find_each do |pool|
- progress.puts " - Object pool #{pool.disk_path}..."
-
- unless pool.source_project
- progress.puts " - Object pool #{pool.disk_path}... " + "[SKIPPED]".color(:cyan)
- next
- end
-
- pool.state = 'none'
- pool.save
-
- pool.schedule
- end
- end
- end
-end
-
-Backup::Repositories.prepend_mod_with('Backup::Repositories')
diff --git a/lib/backup/targets/database.rb b/lib/backup/targets/database.rb
new file mode 100644
index 00000000000..dbcb48b9e7d
--- /dev/null
+++ b/lib/backup/targets/database.rb
@@ -0,0 +1,283 @@
+# frozen_string_literal: true
+
+require 'yaml'
+
+module Backup
+ module Targets
+ class Database < Target
+ extend ::Gitlab::Utils::Override
+ include Backup::Helper
+ attr_reader :force
+
+ IGNORED_ERRORS = [
+ # Ignore warnings
+ /WARNING:/,
+ # Ignore the DROP errors; recent database dumps will use --if-exists with pg_dump
+ /does not exist$/,
+ # User may not have permissions to drop extensions or schemas
+ /must be owner of/
+ ].freeze
+ IGNORED_ERRORS_REGEXP = Regexp.union(IGNORED_ERRORS).freeze
+
+ def initialize(progress, options:, force:)
+ super(progress, options: options)
+ @force = force
+ end
+
+ override :dump
+
+ def dump(destination_dir, _)
+ FileUtils.mkdir_p(destination_dir)
+
+ each_database(destination_dir) do |backup_connection|
+ pg_env = backup_connection.database_configuration.pg_env_variables
+ active_record_config = backup_connection.database_configuration.activerecord_variables
+ pg_database_name = active_record_config[:database]
+
+ dump_file_name = file_name(destination_dir, backup_connection.connection_name)
+ FileUtils.rm_f(dump_file_name)
+
+ progress.print "Dumping PostgreSQL database #{pg_database_name} ... "
+
+ schemas = []
+
+ if Gitlab.config.backup.pg_schema
+ schemas << Gitlab.config.backup.pg_schema
+ schemas.push(*Gitlab::Database::EXTRA_SCHEMAS.map(&:to_s))
+ end
+
+ pg_dump = ::Gitlab::Backup::Cli::Utils::PgDump.new(
+ database_name: pg_database_name,
+ snapshot_id: backup_connection.snapshot_id,
+ schemas: schemas,
+ env: pg_env)
+
+ success = Backup::Dump::Postgres.new.dump(dump_file_name, pg_dump)
+
+ backup_connection.release_snapshot! if backup_connection.snapshot_id
+
+ raise DatabaseBackupError.new(active_record_config, dump_file_name) unless success
+
+ report_success(success)
+ progress.flush
+ end
+ ensure
+ ::Gitlab::Database::EachDatabase.each_connection(
+ only: base_models_for_backup.keys, include_shared: false
+ ) do |connection, _|
+ Gitlab::Database::TransactionTimeoutSettings.new(connection).restore_timeouts
+ end
+ end
+
+ override :restore
+
+ def restore(destination_dir, _)
+ base_models_for_backup.each do |database_name, _|
+ backup_connection = Backup::DatabaseConnection.new(database_name)
+
+ config = backup_connection.database_configuration.activerecord_variables
+
+ db_file_name = file_name(destination_dir, database_name)
+ database = config[:database]
+
+ unless File.exist?(db_file_name)
+ raise(Backup::Error, "Source database file does not exist #{db_file_name}") if main_database?(database_name)
+
+ progress.puts "Source backup for the database #{database_name} doesn't exist. Skipping the task"
+ return false
+ end
+
+ unless force
+ progress.puts 'Removing all tables. Press `Ctrl-C` within 5 seconds to abort'.color(:yellow)
+ sleep(5)
+ end
+
+ # Drop all tables. Load the schema to ensure we don't have any newer tables
+ # hanging out from a failed upgrade
+ drop_tables(database_name)
+
+ pg_env = backup_connection.database_configuration.pg_env_variables
+ success = with_transient_pg_env(pg_env) do
+ decompress_rd, decompress_wr = IO.pipe
+ decompress_pid = spawn(decompress_cmd, out: decompress_wr, in: db_file_name)
+ decompress_wr.close
+
+ status, @errors =
+ case config[:adapter]
+ when "postgresql" then
+ progress.print "Restoring PostgreSQL database #{database} ... "
+ execute_and_track_errors(pg_restore_cmd(database), decompress_rd)
+ end
+ decompress_rd.close
+
+ Process.waitpid(decompress_pid)
+ $?.success? && status.success?
+ end
+
+ if @errors.present?
+ progress.print "------ BEGIN ERRORS -----\n".color(:yellow)
+ progress.print @errors.join.color(:yellow)
+ progress.print "------ END ERRORS -------\n".color(:yellow)
+ end
+
+ report_success(success)
+ raise Backup::Error, 'Restore failed' unless success
+ end
+ end
+
+ override :pre_restore_warning
+
+ def pre_restore_warning
+ return if force
+
+ <<-MSG.strip_heredoc
+ Be sure to stop Puma, Sidekiq, and any other process that
+ connects to the database before proceeding. For Omnibus
+ installs, see the following link for more information:
+ #{help_page_url('raketasks/backup_restore.html', 'restore-for-omnibus-gitlab-installations')}
+
+ Before restoring the database, we will remove all existing
+ tables to avoid future upgrade problems. Be aware that if you have
+ custom tables in the GitLab database these tables and all data will be
+ removed.
+ MSG
+ end
+
+ override :post_restore_warning
+
+ def post_restore_warning
+ return unless @errors.present?
+
+ <<-MSG.strip_heredoc
+ There were errors in restoring the schema. This may cause
+ issues if this results in missing indexes, constraints, or
+ columns. Please record the errors above and contact GitLab
+ Support if you have questions:
+ https://about.gitlab.com/support/
+ MSG
+ end
+
+ protected
+
+ def base_models_for_backup
+ @base_models_for_backup ||= Gitlab::Database.database_base_models_with_gitlab_shared
+ end
+
+ def main_database?(database_name)
+ database_name.to_sym == :main
+ end
+
+ def file_name(base_dir, database_name)
+ prefix = database_name.to_sym != :main ? "#{database_name}_" : ''
+
+ File.join(base_dir, "#{prefix}database.sql.gz")
+ end
+
+ def ignore_error?(line)
+ IGNORED_ERRORS_REGEXP.match?(line)
+ end
+
+ def execute_and_track_errors(cmd, decompress_rd)
+ errors = []
+
+ Open3.popen3(ENV, *cmd) do |stdin, stdout, stderr, thread|
+ stdin.binmode
+
+ out_reader = Thread.new do
+ data = stdout.read
+ $stdout.write(data)
+ end
+
+ err_reader = Thread.new do
+ until (raw_line = stderr.gets).nil?
+ warn(raw_line)
+ errors << raw_line unless ignore_error?(raw_line)
+ end
+ end
+
+ begin
+ IO.copy_stream(decompress_rd, stdin)
+ rescue Errno::EPIPE
+ end
+
+ stdin.close
+ [thread, out_reader, err_reader].each(&:join)
+ [thread.value, errors]
+ end
+ end
+
+ def report_success(success)
+ if success
+ progress.puts '[DONE]'.color(:green)
+ else
+ progress.puts '[FAILED]'.color(:red)
+ end
+ end
+
+ private
+
+ def drop_tables(database_name)
+ puts_time 'Cleaning the database ... '.color(:blue)
+
+ if Rake::Task.task_defined? "gitlab:db:drop_tables:#{database_name}"
+ Rake::Task["gitlab:db:drop_tables:#{database_name}"].invoke
+ else
+ # In single database (single or two connections)
+ Rake::Task["gitlab:db:drop_tables"].invoke
+ end
+
+ puts_time 'done'.color(:green)
+ end
+
+ # @deprecated This will be removed when restore operation is refactored to use extended_env directly
+ def with_transient_pg_env(extended_env)
+ ENV.merge!(extended_env)
+ result = yield
+ ENV.reject! { |k, _| extended_env.key?(k) }
+
+ result
+ end
+
+ def pg_restore_cmd(database)
+ ['psql', database]
+ end
+
+ def each_database(destination_dir, &block)
+ databases = []
+
+ # each connection will loop through all database connections defined in `database.yml`
+ # and reject the ones that are shared, so we don't get duplicates
+ #
+ # we consider a connection to be shared when it has `database_tasks: false`
+ ::Gitlab::Database::EachDatabase.each_connection(
+ only: base_models_for_backup.keys, include_shared: false
+ ) do |_, database_connection_name|
+ backup_connection = Backup::DatabaseConnection.new(database_connection_name)
+ databases << backup_connection
+
+ next unless multiple_databases?
+
+ begin
+ # Trigger a transaction snapshot export that will be used by pg_dump later on
+ backup_connection.export_snapshot!
+ rescue ActiveRecord::ConnectionNotEstablished
+ raise Backup::DatabaseBackupError.new(
+ backup_connection.database_configuration.activerecord_variables,
+ file_name(destination_dir, database_connection_name)
+ )
+ end
+ end
+
+ databases.each(&block)
+ end
+
+ def multiple_databases?
+ Gitlab::Database.database_mode == Gitlab::Database::MODE_MULTIPLE_DATABASES
+ end
+
+ def help_page_url(path, anchor = nil)
+ ::Gitlab::Routing.url_helpers.help_page_url(path, anchor: anchor)
+ end
+ end
+ end
+end
diff --git a/lib/backup/targets/files.rb b/lib/backup/targets/files.rb
new file mode 100644
index 00000000000..ea5154ebffe
--- /dev/null
+++ b/lib/backup/targets/files.rb
@@ -0,0 +1,174 @@
+# frozen_string_literal: true
+
+require 'open3'
+
+module Backup
+ module Targets
+ class Files < Target
+ extend ::Gitlab::Utils::Override
+ include Backup::Helper
+
+ DEFAULT_EXCLUDE = 'lost+found'
+
+ attr_reader :excludes
+
+ def initialize(progress, app_files_dir, options:, excludes: [])
+ super(progress, options: options)
+
+ @app_files_dir = app_files_dir
+ @excludes = [DEFAULT_EXCLUDE].concat(excludes)
+ end
+
+ # Copy files from public/files to backup/files
+ override :dump
+
+ def dump(backup_tarball, _)
+ FileUtils.mkdir_p(backup_basepath)
+ FileUtils.rm_f(backup_tarball)
+
+ if ENV['STRATEGY'] == 'copy'
+ cmd = [%w[rsync -a --delete], exclude_dirs(:rsync), %W[#{app_files_realpath} #{backup_basepath}]].flatten
+ output, status = Gitlab::Popen.popen(cmd)
+
+ # Retry if rsync source files vanish
+ if status == 24
+ $stdout.puts "Warning: files vanished during rsync, retrying..."
+ output, status = Gitlab::Popen.popen(cmd)
+ end
+
+ unless status == 0
+ puts output
+ raise_custom_error(backup_tarball)
+ end
+
+ tar_cmd = [tar, exclude_dirs(:tar), %W[-C #{backup_files_realpath} -cf - .]].flatten
+ status_list, output = run_pipeline!([tar_cmd, compress_cmd], out: [backup_tarball, 'w', 0o600])
+ FileUtils.rm_rf(backup_files_realpath)
+ else
+ tar_cmd = [tar, exclude_dirs(:tar), %W[-C #{app_files_realpath} -cf - .]].flatten
+ status_list, output = run_pipeline!([tar_cmd, compress_cmd], out: [backup_tarball, 'w', 0o600])
+ end
+
+ success = pipeline_succeeded?(tar_status: status_list[0], compress_status: status_list[1], output: output)
+
+ raise_custom_error(backup_tarball) unless success
+ end
+
+ override :restore
+
+ def restore(backup_tarball, _)
+ backup_existing_files_dir(backup_tarball)
+
+ cmd_list = [decompress_cmd, %W[#{tar} --unlink-first --recursive-unlink -C #{app_files_realpath} -xf -]]
+ status_list, output = run_pipeline!(cmd_list, in: backup_tarball)
+ success = pipeline_succeeded?(compress_status: status_list[0], tar_status: status_list[1], output: output)
+
+ raise Backup::Error, "Restore operation failed: #{output}" unless success
+ end
+
+ def tar
+ if system(*%w[gtar --version], out: '/dev/null')
+ # It looks like we can get GNU tar by running 'gtar'
+ 'gtar'
+ else
+ 'tar'
+ end
+ end
+
+ def backup_existing_files_dir(backup_tarball)
+ name = File.basename(backup_tarball, '.tar.gz')
+ timestamped_files_path = backup_basepath.join('tmp', "#{name}.#{Time.now.to_i}")
+
+ return unless File.exist?(app_files_realpath)
+
+ # Move all files in the existing repos directory except . and .. to
+ # repositories.<timestamp> directory
+ FileUtils.mkdir_p(timestamped_files_path, mode: 0o700)
+
+ dot_references = [File.join(app_files_realpath, "."), File.join(app_files_realpath, "..")]
+ matching_files = Dir.glob(File.join(app_files_realpath, "*"), File::FNM_DOTMATCH)
+ files = matching_files - dot_references
+
+ FileUtils.mv(files, timestamped_files_path)
+ rescue Errno::EACCES
+ access_denied_error(app_files_realpath)
+ rescue Errno::EBUSY
+ resource_busy_error(app_files_realpath)
+ end
+
+ def run_pipeline!(cmd_list, options = {})
+ err_r, err_w = IO.pipe
+ options[:err] = err_w
+ status_list = Open3.pipeline(*cmd_list, options)
+ err_w.close
+
+ [status_list, err_r.read]
+ end
+
+ def noncritical_warning?(warning)
+ noncritical_warnings = [
+ /^g?tar: \.: Cannot mkdir: No such file or directory$/
+ ]
+
+ noncritical_warnings.map { |w| warning =~ w }.any?
+ end
+
+ def pipeline_succeeded?(tar_status:, compress_status:, output:)
+ return false unless compress_status&.success?
+
+ tar_status&.success? || tar_ignore_non_success?(tar_status.exitstatus, output)
+ end
+
+ def tar_ignore_non_success?(exitstatus, output)
+ # tar can exit with nonzero code:
+ # 1 - if some files changed (i.e. a CI job is currently writing to its log)
+ # 2 - if it cannot create `.` directory (see issue https://gitlab.com/gitlab-org/gitlab/-/issues/22442)
+ # http://www.gnu.org/software/tar/manual/html_section/tar_19.html#Synopsis
+ # so check tar status 1 or stderr output against some non-critical warnings
+ if exitstatus == 1
+ $stdout.puts "Ignoring tar exit status 1 'Some files differ': #{output}"
+ return true
+ end
+
+ # allow tar to fail with other non-success statuses if the output contains a non-critical warning
+ if noncritical_warning?(output)
+ $stdout.puts(
+ "Ignoring non-success exit status #{exitstatus} due to output of non-critical warning(s): #{output}")
+ return true
+ end
+
+ false
+ end
+
+ def exclude_dirs(fmt)
+ excludes.map do |s|
+ if s == DEFAULT_EXCLUDE
+ "--exclude=#{s}"
+ elsif fmt == :rsync
+ "--exclude=/#{File.join(File.basename(app_files_realpath), s)}"
+ elsif fmt == :tar
+ "--exclude=./#{s}"
+ end
+ end
+ end
+
+ def raise_custom_error(backup_tarball)
+ raise FileBackupError.new(app_files_realpath, backup_tarball)
+ end
+
+ private
+
+ def app_files_realpath
+ @app_files_realpath ||= File.realpath(@app_files_dir)
+ end
+
+ def backup_files_realpath
+ @backup_files_realpath ||= backup_basepath.join(File.basename(@app_files_dir))
+ end
+
+ def backup_basepath
+ Pathname(Gitlab.config.backup.path)
+ end
+ end
+ end
+end
diff --git a/lib/backup/targets/repositories.rb b/lib/backup/targets/repositories.rb
new file mode 100644
index 00000000000..6d4456434fb
--- /dev/null
+++ b/lib/backup/targets/repositories.rb
@@ -0,0 +1,145 @@
+# frozen_string_literal: true
+
+require 'yaml'
+
+module Backup
+ module Targets
+ # Backup and restores repositories by querying the database
+ class Repositories < Target
+ extend ::Gitlab::Utils::Override
+
+ # @param [IO] progress IO interface to output progress
+ # @param [Object] :strategy Fetches backups from gitaly
+ # @param [Array<String>] :storages Filter by specified storage names. Empty means all storages.
+ # @param [Array<String>] :paths Filter by specified project paths. Empty means all projects, groups, and snippets.
+ # @param [Array<String>] :skip_paths Skip specified project paths. Empty means all projects, groups, and snippets.
+ def initialize(progress, strategy:, options:, storages: [], paths: [], skip_paths: [])
+ super(progress, options: options)
+
+ @strategy = strategy
+ @storages = storages
+ @paths = paths
+ @skip_paths = skip_paths
+ end
+
+ override :dump
+
+ def dump(destination_path, backup_id)
+ strategy.start(:create, destination_path, backup_id: backup_id)
+ enqueue_consecutive
+
+ ensure
+ strategy.finish!
+ end
+
+ override :restore
+
+ def restore(destination_path, backup_id)
+ strategy.start(:restore,
+ destination_path,
+ remove_all_repositories: remove_all_repositories,
+ backup_id: backup_id)
+ enqueue_consecutive
+
+ ensure
+ strategy.finish!
+
+ restore_object_pools
+ end
+
+ private
+
+ attr_reader :strategy, :storages, :paths, :skip_paths
+
+ def remove_all_repositories
+ return if paths.present?
+
+ storages.presence || Gitlab.config.repositories.storages.keys
+ end
+
+ def enqueue_consecutive
+ enqueue_consecutive_projects
+ enqueue_consecutive_snippets
+ end
+
+ def enqueue_consecutive_projects
+ project_relation.find_each(batch_size: 1000) do |project|
+ enqueue_project(project)
+ end
+ end
+
+ def enqueue_consecutive_snippets
+ snippet_relation.find_each(batch_size: 1000) { |snippet| enqueue_snippet(snippet) }
+ end
+
+ def enqueue_project(project)
+ strategy.enqueue(project, Gitlab::GlRepository::PROJECT)
+ strategy.enqueue(project, Gitlab::GlRepository::WIKI)
+
+ return unless project.design_management_repository
+
+ strategy.enqueue(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ end
+
+ def enqueue_snippet(snippet)
+ strategy.enqueue(snippet, Gitlab::GlRepository::SNIPPET)
+ end
+
+ def project_relation
+ scope = Project.includes(:route, :group, :namespace)
+ scope = scope.id_in(ProjectRepository.for_repository_storage(storages).select(:project_id)) if storages.any?
+ if paths.any?
+ scope = scope.where_full_path_in(paths).or(
+ Project.where(namespace_id: Namespace.where_full_path_in(paths).self_and_descendants)
+ )
+ end
+
+ scope = scope.and(skipped_path_relation) if skip_paths.any?
+ scope
+ end
+
+ def snippet_relation
+ scope = Snippet.all
+ scope = scope.id_in(SnippetRepository.for_repository_storage(storages).select(:snippet_id)) if storages.any?
+ if paths.any?
+ scope = scope.joins(:project).merge(
+ Project.where_full_path_in(paths).or(
+ Project.where(namespace_id: Namespace.where_full_path_in(paths).self_and_descendants)
+ )
+ )
+ end
+
+ if skip_paths.any?
+ scope = scope.where(project: skipped_path_relation)
+ scope = scope.or(Snippet.where(project: nil)) if !paths.any? && !storages.any?
+ end
+
+ scope
+ end
+
+ def skipped_path_relation
+ Project.where.not(id: Project.where_full_path_in(skip_paths).or(
+ Project.where(namespace_id: Namespace.where_full_path_in(skip_paths).self_and_descendants)
+ ))
+ end
+
+ def restore_object_pools
+ PoolRepository.includes(:source_project).find_each do |pool|
+ progress.puts " - Object pool #{pool.disk_path}..."
+
+ unless pool.source_project
+ progress.puts " - Object pool #{pool.disk_path}... " + "[SKIPPED]".color(:cyan)
+ next
+ end
+
+ pool.state = 'none'
+ pool.save
+
+ pool.schedule
+ end
+ end
+ end
+ end
+end
+
+Backup::Targets::Repositories.prepend_mod_with('Backup::Targets::Repositories')
diff --git a/lib/backup/targets/target.rb b/lib/backup/targets/target.rb
new file mode 100644
index 00000000000..aae41bb6d93
--- /dev/null
+++ b/lib/backup/targets/target.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module Backup
+ module Targets
+ class Target
+ # Backup creation and restore option flags
+ #
+ # @return [Backup::Options]
+ attr_reader :options
+
+ def initialize(progress, options:)
+ @progress = progress
+ @options = options
+ end
+
+ # dump task backup to `path`
+ #
+ # @param [String] path fully qualified backup task destination
+ # @param [String] backup_id unique identifier for the backup
+ def dump(path, backup_id)
+ raise NotImplementedError
+ end
+
+ # restore task backup from `path`
+ def restore(path, backup_id)
+ raise NotImplementedError
+ end
+
+ # a string returned here will be displayed to the user before calling #restore
+ def pre_restore_warning = ''
+
+ # a string returned here will be displayed to the user after calling #restore
+ def post_restore_warning = ''
+
+ private
+
+ attr_reader :progress
+
+ def puts_time(msg)
+ progress.puts "#{Time.zone.now} -- #{msg}"
+ Gitlab::BackupLogger.info(message: Rainbow.uncolor(msg).to_s)
+ end
+ end
+ end
+end
diff --git a/lib/backup/task.rb b/lib/backup/task.rb
deleted file mode 100644
index b5f6c9e6330..00000000000
--- a/lib/backup/task.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-module Backup
- class Task
- # Backup creation and restore option flags
- #
- # @return [Backup::Options]
- attr_reader :options
-
- def initialize(progress, options:)
- @progress = progress
- @options = options
- end
-
- # dump task backup to `path`
- #
- # @param [String] path fully qualified backup task destination
- # @param [String] backup_id unique identifier for the backup
- def dump(path, backup_id)
- raise NotImplementedError
- end
-
- # restore task backup from `path`
- def restore(path, backup_id)
- raise NotImplementedError
- end
-
- # a string returned here will be displayed to the user before calling #restore
- def pre_restore_warning
- end
-
- # a string returned here will be displayed to the user after calling #restore
- def post_restore_warning
- end
-
- private
-
- attr_reader :progress
-
- def puts_time(msg)
- progress.puts "#{Time.zone.now} -- #{msg}"
- Gitlab::BackupLogger.info(message: "#{Rainbow.uncolor(msg)}")
- end
- end
-end
diff --git a/lib/backup/tasks/artifacts.rb b/lib/backup/tasks/artifacts.rb
new file mode 100644
index 00000000000..0179a486b67
--- /dev/null
+++ b/lib/backup/tasks/artifacts.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class Artifacts < Task
+ def self.id = 'artifacts'
+
+ def human_name = _('artifacts')
+
+ def destination_path = 'artifacts.tar.gz'
+
+ def target
+ excludes = ['tmp']
+
+ ::Backup::Targets::Files.new(progress, app_files_dir, options: options, excludes: excludes)
+ end
+
+ private
+
+ def app_files_dir
+ JobArtifactUploader.root
+ end
+ end
+ end
+end
diff --git a/lib/backup/tasks/builds.rb b/lib/backup/tasks/builds.rb
new file mode 100644
index 00000000000..b1ed0454e7d
--- /dev/null
+++ b/lib/backup/tasks/builds.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class Builds < Task
+ def self.id = 'builds'
+
+ def human_name = _('builds')
+
+ def destination_path = 'builds.tar.gz'
+
+ def target
+ ::Backup::Targets::Files.new(progress, app_files_dir, options: options)
+ end
+
+ private
+
+ def app_files_dir
+ Settings.gitlab_ci.builds_path
+ end
+ end
+ end
+end
diff --git a/lib/backup/tasks/ci_secure_files.rb b/lib/backup/tasks/ci_secure_files.rb
new file mode 100644
index 00000000000..d51e5962413
--- /dev/null
+++ b/lib/backup/tasks/ci_secure_files.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class CiSecureFiles < Task
+ def self.id = 'ci_secure_files'
+
+ def human_name = _('ci secure files')
+
+ def destination_path = 'ci_secure_files.tar.gz'
+
+ def target
+ excludes = ['tmp']
+
+ ::Backup::Targets::Files.new(progress, app_files_dir, options: options, excludes: excludes)
+ end
+
+ private
+
+ def app_files_dir
+ Settings.ci_secure_files.storage_path
+ end
+ end
+ end
+end
diff --git a/lib/backup/tasks/database.rb b/lib/backup/tasks/database.rb
new file mode 100644
index 00000000000..8abc7bc0137
--- /dev/null
+++ b/lib/backup/tasks/database.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class Database < Task
+ def self.id = 'db'
+
+ def human_name = _('database')
+
+ def destination_path = 'db'
+
+ def cleanup_path = 'db'
+
+ def target
+ ::Backup::Targets::Database.new(progress, options: options, force: options.force?)
+ end
+ end
+ end
+end
diff --git a/lib/backup/tasks/lfs.rb b/lib/backup/tasks/lfs.rb
new file mode 100644
index 00000000000..3bb3774233a
--- /dev/null
+++ b/lib/backup/tasks/lfs.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class Lfs < Task
+ def self.id = 'lfs'
+
+ def human_name = _('lfs objects')
+
+ def destination_path = 'lfs.tar.gz'
+
+ def target
+ ::Backup::Targets::Files.new(progress, app_files_dir, options: options)
+ end
+
+ private
+
+ def app_files_dir
+ Settings.lfs.storage_path
+ end
+ end
+ end
+end
diff --git a/lib/backup/tasks/packages.rb b/lib/backup/tasks/packages.rb
new file mode 100644
index 00000000000..a85a3e8ba6c
--- /dev/null
+++ b/lib/backup/tasks/packages.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class Packages < Task
+ def self.id = 'packages'
+
+ def human_name = _('packages')
+
+ def destination_path = 'packages.tar.gz'
+
+ def target
+ excludes = ['tmp']
+
+ ::Backup::Targets::Files.new(progress, app_files_dir, options: options, excludes: excludes)
+ end
+
+ private
+
+ def app_files_dir
+ Settings.packages.storage_path
+ end
+ end
+ end
+end
diff --git a/lib/backup/tasks/pages.rb b/lib/backup/tasks/pages.rb
new file mode 100644
index 00000000000..a6b49f2d7e4
--- /dev/null
+++ b/lib/backup/tasks/pages.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class Pages < Task
+ # Pages used to deploy tmp files to this path.
+ # If some of these files are still there, we don't need them in the backup.
+ LEGACY_PAGES_TMP_PATH = '@pages.tmp'
+
+ def self.id = 'pages'
+
+ def human_name = _('pages')
+
+ def destination_path = 'pages.tar.gz'
+
+ def target
+ excludes = [LEGACY_PAGES_TMP_PATH]
+
+ ::Backup::Targets::Files.new(progress, app_files_dir, options: options, excludes: excludes)
+ end
+
+ private
+
+ def app_files_dir
+ Gitlab.config.pages.path
+ end
+ end
+ end
+end
diff --git a/lib/backup/tasks/registry.rb b/lib/backup/tasks/registry.rb
new file mode 100644
index 00000000000..a1f8a3805d1
--- /dev/null
+++ b/lib/backup/tasks/registry.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class Registry < Task
+ def self.id = 'registry'
+
+ def enabled = Gitlab.config.registry.enabled
+
+ def human_name = _('container registry images')
+
+ def destination_path = 'registry.tar.gz'
+
+ def target
+ ::Backup::Targets::Files.new(progress, app_files_dir, options: options)
+ end
+
+ private
+
+ def app_files_dir
+ Settings.registry.path
+ end
+ end
+ end
+end
diff --git a/lib/backup/tasks/repositories.rb b/lib/backup/tasks/repositories.rb
new file mode 100644
index 00000000000..7a858875737
--- /dev/null
+++ b/lib/backup/tasks/repositories.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class Repositories < Task
+ attr_reader :server_side
+
+ def self.id = 'repositories'
+
+ def initialize(progress:, options:, server_side:)
+ @server_side = server_side
+
+ super(progress: progress, options: options)
+ end
+
+ def human_name = _('repositories')
+
+ def destination_path = 'repositories'
+
+ def destination_optional = true
+
+ def target
+ strategy = Backup::GitalyBackup.new(progress,
+ incremental: options.incremental?,
+ max_parallelism: options.max_parallelism,
+ storage_parallelism: options.max_storage_parallelism,
+ server_side: server_side
+ )
+
+ ::Backup::Targets::Repositories.new(progress,
+ strategy: strategy,
+ options: options,
+ storages: options.repositories_storages,
+ paths: options.repositories_paths,
+ skip_paths: options.skip_repositories_paths
+ )
+ end
+ end
+ end
+end
diff --git a/lib/backup/tasks/task.rb b/lib/backup/tasks/task.rb
new file mode 100644
index 00000000000..4727e19b550
--- /dev/null
+++ b/lib/backup/tasks/task.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class Task
+ attr_reader :progress, :options
+
+ # Identifier used as a CLI parameter to skip this task from executing
+ def self.id = raise NotImplementedError
+
+ def initialize(progress:, options:)
+ @progress = progress
+ @options = options
+ end
+
+ # Key string that identifies the task
+ def key = raise NotImplementedError
+
+ # Name of the task used for logging.
+ def human_name = raise NotImplementedError
+
+ # Where the task should put its backup file/dir
+ def destination_path = raise NotImplementedError
+
+ # The target factory method
+ def target = raise NotImplementedError
+
+ # Path to remove after a successful backup; defaults to #destination_path when not overridden
+ def cleanup_path
+ destination_path
+ end
+
+ # `true` if the destination might not exist on a successful backup
+ def destination_optional = false
+
+ # `true` if the task can be used
+ def enabled = true
+
+ def enabled? = enabled
+ end
+ end
+end
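Note: the Task subclasses above replace the Backup::Manager::TaskDefinition structs removed in spec/lib/backup/manager_spec.rb further down. The following is a minimal sketch (not the actual Backup::Manager implementation) of how a manager-style caller might drive these objects, using only behaviour exercised in the specs in this commit; local variables such as progress, options and backup_id are assumed to be in scope.

    # Illustrative only: mirrors the dump/skip/enabled? flow asserted in
    # spec/lib/backup/manager_spec.rb and spec/lib/backup/options_spec.rb.
    definitions = {
      Backup::Tasks::TerraformState.id => Backup::Tasks::TerraformState.new(progress: progress, options: options),
      Backup::Tasks::Lfs.id            => Backup::Tasks::Lfs.new(progress: progress, options: options)
    }

    definitions.each do |name, task|
      next Gitlab::BackupLogger.info(message: "Dumping #{task.human_name} ... [DISABLED]") unless task.enabled?
      next Gitlab::BackupLogger.info(message: "Dumping #{task.human_name} ... [SKIPPED]") if options.skip_task?(name)

      Gitlab::BackupLogger.info(message: "Dumping #{task.human_name} ... ")
      # Each task builds its own target (Files, Database, Repositories, ...)
      # and dumps into its destination under the configured backup path.
      task.target.dump(File.join(Gitlab.config.backup.path, task.destination_path), backup_id)
      Gitlab::BackupLogger.info(message: "Dumping #{task.human_name} ... done")
    end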
diff --git a/lib/backup/tasks/terraform_state.rb b/lib/backup/tasks/terraform_state.rb
new file mode 100644
index 00000000000..45387b1f6cf
--- /dev/null
+++ b/lib/backup/tasks/terraform_state.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class TerraformState < Task
+ def self.id = 'terraform_state'
+
+ def human_name = _('terraform states')
+
+ def destination_path = 'terraform_state.tar.gz'
+
+ def target
+ excludes = ['tmp']
+
+ ::Backup::Targets::Files.new(progress, app_files_dir, options: options, excludes: excludes)
+ end
+
+ private
+
+ def app_files_dir
+ Settings.terraform_state.storage_path
+ end
+ end
+ end
+end
diff --git a/lib/backup/tasks/uploads.rb b/lib/backup/tasks/uploads.rb
new file mode 100644
index 00000000000..b83e8179b91
--- /dev/null
+++ b/lib/backup/tasks/uploads.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Backup
+ module Tasks
+ class Uploads < Task
+ def self.id = 'uploads'
+
+ def human_name = _('uploads')
+
+ def destination_path = 'uploads.tar.gz'
+
+ def target
+ excludes = ['tmp']
+
+ ::Backup::Targets::Files.new(progress, app_files_dir, options: options, excludes: excludes)
+ end
+
+ private
+
+ def app_files_dir
+ File.join(Gitlab.config.uploads.storage_path, 'uploads')
+ end
+ end
+ end
+end
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 041dc9640ce..c1b1daf08e9 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -12129,6 +12129,9 @@ msgstr ""
msgid "CodeSuggestions|Duo Pro add-on"
msgstr ""
+msgid "CodeSuggestions|Duo Pro add-on status"
+msgstr ""
+
msgid "CodeSuggestions|Duo Pro seats used"
msgstr ""
@@ -18335,9 +18338,6 @@ msgstr ""
msgid "Due to inactivity, this project is scheduled to be deleted on %{deletion_date}. %{link_start}Why is this scheduled?%{link_end}"
msgstr ""
-msgid "Duo Pro add-on status"
-msgstr ""
-
msgid "Duplicate page: A page with that title already exists"
msgstr ""
@@ -57097,12 +57097,6 @@ msgstr ""
msgid "Your GPG keys"
msgstr ""
-msgid "Your GitHub access token does not have the correct scope to import collaborators."
-msgstr ""
-
-msgid "Your GitHub access token does not have the correct scope to import."
-msgstr ""
-
msgid "Your GitLab account has been locked due to an excessive number of unsuccessful sign in attempts. You can wait for your account to automatically unlock in %{duration} or you can click the link below to unlock now."
msgstr ""
diff --git a/scripts/review_apps/base-config.yaml b/scripts/review_apps/base-config.yaml
index ef73d7bfd49..04c3e4d13f2 100644
--- a/scripts/review_apps/base-config.yaml
+++ b/scripts/review_apps/base-config.yaml
@@ -94,7 +94,7 @@ gitlab:
memory: 1500Mi
limits:
cpu: 700m
- memory: 2200Mi
+ memory: 2400Mi
hpa:
cpu:
targetAverageValue: 650m
diff --git a/spec/factories/gitlab/backup/options.rb b/spec/factories/gitlab/backup/options.rb
index 7cd7c5795ab..6288d142a08 100644
--- a/spec/factories/gitlab/backup/options.rb
+++ b/spec/factories/gitlab/backup/options.rb
@@ -45,9 +45,18 @@ FactoryBot.define do
repositories_paths
skip_repositories_paths
remote_directory
+ skip_all
+ compression_options { attributes_for(:backup_compression_options, :all) }
+ end
+
+ trait :skip_all do
skippable_tasks { attributes_for(:backup_skippable_tasks, :skip_all) }
skippable_operations { attributes_for(:backup_skippable_operations, :skip_all) }
- compression_options { attributes_for(:backup_compression_options, :all) }
+ end
+
+ trait :skip_none do
+ skippable_tasks { attributes_for(:backup_skippable_tasks, :skip_none) }
+ skippable_operations { attributes_for(:backup_skippable_operations, :skip_none) }
end
end
diff --git a/spec/features/merge_request/user_reverts_merge_request_spec.rb b/spec/features/merge_request/user_reverts_merge_request_spec.rb
index c2f82039f0b..2130ca9d323 100644
--- a/spec/features/merge_request/user_reverts_merge_request_spec.rb
+++ b/spec/features/merge_request/user_reverts_merge_request_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe 'User reverts a merge request', :js, feature_category: :code_review_workflow do
include Spec::Support::Helpers::ModalHelpers
- let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
+ let(:merge_request) { create(:merge_request, :simple, source_project: project) }
before do
project.add_developer(user)
@@ -59,6 +59,22 @@ RSpec.describe 'User reverts a merge request', :js, feature_category: :code_revi
expect(page).not_to have_link('Revert')
end
+ context 'when project merge method is fast-forward merge and squash is enabled' do
+ let(:merge_request) { create(:merge_request, target_branch: 'master', source_branch: 'compare-with-merge-head-target', source_project: project, squash: true) }
+
+ before do
+ project.update!(merge_requests_ff_only_enabled: true)
+ end
+
+ it 'reverts a merge request', :sidekiq_might_not_need_inline do
+ revert_commit
+
+ wait_for_requests
+
+ expect(page).to have_content('The merge request has been successfully reverted.')
+ end
+ end
+
def revert_commit(create_merge_request: false)
click_button 'Revert'
diff --git a/spec/frontend/members/components/avatars/user_avatar_spec.js b/spec/frontend/members/components/avatars/user_avatar_spec.js
index 4808bcb9363..6b60b402a62 100644
--- a/spec/frontend/members/components/avatars/user_avatar_spec.js
+++ b/spec/frontend/members/components/avatars/user_avatar_spec.js
@@ -36,6 +36,7 @@ describe('UserAvatar', () => {
href: user.webUrl,
'data-user-id': `${user.id}`,
'data-username': user.username,
+ 'data-email': user.email,
});
});
diff --git a/spec/frontend/members/mock_data.js b/spec/frontend/members/mock_data.js
index f550039bfdc..a81f0bcf35f 100644
--- a/spec/frontend/members/mock_data.js
+++ b/spec/frontend/members/mock_data.js
@@ -34,6 +34,7 @@ export const member = {
availability: null,
lastActivityOn: '2022-03-15',
showStatus: true,
+ email: 'my@email.com',
},
id: 238,
createdAt: '2020-07-17T16:22:46.923Z',
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
index 9f3431ef5a5..ef5fbbc418c 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
@@ -242,10 +242,20 @@ describe('tags list row', () => {
expect(findTimeAgoTooltip().exists()).toBe(true);
});
- it('pass the correct props to time ago tooltip', () => {
+ it('passes publishedAt value to time ago tooltip', () => {
mountComponent();
- expect(findTimeAgoTooltip().attributes()).toMatchObject({ time: tag.createdAt });
+ expect(findTimeAgoTooltip().attributes()).toMatchObject({ time: tag.publishedAt });
+ });
+
+ describe('when publishedAt is missing', () => {
+ beforeEach(() => {
+ mountComponent({ ...defaultProps, tag: { ...tag, publishedAt: null } });
+ });
+
+ it('passes createdAt value to time ago tooltip', () => {
+ expect(findTimeAgoTooltip().attributes()).toMatchObject({ time: tag.createdAt });
+ });
});
});
@@ -351,7 +361,7 @@ describe('tags list row', () => {
describe.each`
name | finderFunction | text | icon | clipboard
- ${'published date detail'} | ${findPublishedDateDetail} | ${'Published to the gitlab-org/gitlab-test/rails-12009 image repository at 13:29:38 UTC on 2020-11-03'} | ${'clock'} | ${false}
+ ${'published date detail'} | ${findPublishedDateDetail} | ${'Published to the gitlab-org/gitlab-test/rails-12009 image repository at 13:29:38 UTC on 2020-11-05'} | ${'clock'} | ${false}
${'manifest detail'} | ${findManifestDetail} | ${'Manifest digest: sha256:2cf3d2fdac1b04a14301d47d51cb88dcd26714c74f91440eeee99ce399089062'} | ${'log'} | ${true}
${'configuration detail'} | ${findConfigurationDetail} | ${'Configuration digest: sha256:c2613843ab33aabf847965442b13a8b55a56ae28837ce182627c0716eb08c02b'} | ${'cloud-gear'} | ${true}
`('$name details row', ({ finderFunction, text, icon, clipboard }) => {
@@ -387,6 +397,18 @@ describe('tags list row', () => {
});
}
});
+
+ describe('when publishedAt is missing', () => {
+ beforeEach(() => {
+ mountComponent({ ...defaultProps, tag: { ...tag, publishedAt: null } });
+ });
+
+ it('name details row has correct text', () => {
+ expect(findPublishedDateDetail().text()).toMatchInterpolatedText(
+ 'Published to the gitlab-org/gitlab-test/rails-12009 image repository at 13:29:38 UTC on 2020-11-03',
+ );
+ });
+ });
});
describe('when the tag does not have a digest', () => {
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
index 5ee1b4315ff..31feb65395e 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/mock_data.js
@@ -158,6 +158,7 @@ export const tagsMock = [
revision: 'c2613843ab33aabf847965442b13a8b55a56ae28837ce182627c0716eb08c02b',
shortRevision: 'c2613843a',
createdAt: '2020-11-03T13:29:38+00:00',
+ publishedAt: '2020-11-05T13:29:38+00:00',
totalSize: '1099511627776',
canDelete: true,
__typename: 'ContainerRepositoryTag',
@@ -170,6 +171,7 @@ export const tagsMock = [
revision: 'df44e7228f0f255c73e35b6f0699624a615f42746e3e8e2e4b3804a6d6fc3292',
shortRevision: 'df44e7228',
createdAt: '2020-11-03T13:29:32+00:00',
+ publishedAt: '2020-11-05T13:29:32+00:00',
totalSize: '536870912000',
canDelete: true,
__typename: 'ContainerRepositoryTag',
diff --git a/spec/frontend/user_popovers_spec.js b/spec/frontend/user_popovers_spec.js
index 6f39eb9a118..2a3e48a4a7b 100644
--- a/spec/frontend/user_popovers_spec.js
+++ b/spec/frontend/user_popovers_spec.js
@@ -156,13 +156,14 @@ describe('User Popovers', () => {
});
it('populates popover with preloaded user data', () => {
- const { name, userId, username } = userLink.dataset;
+ const { name, userId, username, email } = userLink.dataset;
expect(userLink.user).toEqual(
expect.objectContaining({
name,
userId,
username,
+ email,
}),
);
});
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index 0457044f985..3a1318d30db 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -29,6 +29,7 @@ const DEFAULT_PROPS = {
id: 1,
username: 'root',
name: 'Administrator',
+ email: null,
location: 'Vienna',
localTime: '2:30 PM',
webUrl: '/root',
@@ -125,7 +126,17 @@ describe('User Popover Component', () => {
describe('job data', () => {
const findWorkInformation = () => wrapper.findComponent({ ref: 'workInformation' });
const findBio = () => wrapper.findComponent({ ref: 'bio' });
+ const findEmail = () => wrapper.findComponent({ ref: 'email' });
const bio = 'My super interesting bio';
+ const email = 'my@email.com';
+
+ it('should show email', () => {
+ const user = { ...DEFAULT_PROPS.user, email };
+
+ createWrapper({ user });
+
+ expect(findEmail().text()).toBe(email);
+ });
it('should show only bio if work information is not available', () => {
const user = { ...DEFAULT_PROPS.user, bio };
diff --git a/spec/frontend/work_items/components/work_item_assignees_with_edit_spec.js b/spec/frontend/work_items/components/work_item_assignees_with_edit_spec.js
index f9c875fa124..35062a6ebf7 100644
--- a/spec/frontend/work_items/components/work_item_assignees_with_edit_spec.js
+++ b/spec/frontend/work_items/components/work_item_assignees_with_edit_spec.js
@@ -250,12 +250,14 @@ describe('WorkItemAssigneesWithEdit component', () => {
it('does not render `Invite members` link if user has no permission to invite members', () => {
createComponent();
+ expect(findSidebarDropdownWidget().props('showFooter')).toBe(false);
expect(findInviteMembersTrigger().exists()).toBe(false);
});
it('renders `Invite members` link if user has a permission to invite members', () => {
createComponent({ canInviteMembers: true });
+ expect(findSidebarDropdownWidget().props('showFooter')).toBe(true);
expect(findInviteMembersTrigger().exists()).toBe(true);
});
});
diff --git a/spec/lib/backup/files_spec.rb b/spec/lib/backup/files_spec.rb
deleted file mode 100644
index 3c96628b4cf..00000000000
--- a/spec/lib/backup/files_spec.rb
+++ /dev/null
@@ -1,390 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Backup::Files, feature_category: :backup_restore do
- let(:progress) { StringIO.new }
- let!(:project) { create(:project) }
- let(:backup_options) { Backup::Options.new }
-
- let(:status_0) { double('exit 0', success?: true, exitstatus: 0) }
- let(:status_1) { double('exit 1', success?: false, exitstatus: 1) }
- let(:status_2) { double('exit 2', success?: false, exitstatus: 2) }
-
- before do
- allow(progress).to receive(:puts)
- allow(progress).to receive(:print)
- allow(FileUtils).to receive(:mkdir_p).and_return(true)
- allow(FileUtils).to receive(:mv).and_return(true)
- allow(File).to receive(:exist?).and_return(true)
- allow(File).to receive(:realpath).with("/var/gitlab-registry").and_return("/var/gitlab-registry")
- allow(File).to receive(:realpath).with("/var/gitlab-registry/..").and_return("/var")
- allow(File).to receive(:realpath).with("/var/gitlab-pages").and_return("/var/gitlab-pages")
- allow(File).to receive(:realpath).with("/var/gitlab-pages/..").and_return("/var")
-
- allow_any_instance_of(described_class).to receive(:progress).and_return(progress)
- end
-
- RSpec::Matchers.define :eq_statuslist do |expected|
- match do |actual|
- actual.map(&:exitstatus) == expected.map(&:exitstatus)
- end
-
- description do
- 'be an Array of Process::Status with equal exitstatus against expected'
- end
-
- failure_message do |actual|
- "expected #{actual} exitstatuses list to be equal #{expected} exitstatuses list"
- end
- end
-
- describe '#restore' do
- subject { described_class.new(progress, '/var/gitlab-registry', options: backup_options) }
-
- let(:timestamp) { Time.utc(2017, 3, 22) }
-
- around do |example|
- travel_to(timestamp) { example.run }
- end
-
- describe 'folders with permission' do
- before do
- allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
- allow(subject).to receive(:backup_existing_files).and_return(true)
- allow(subject).to receive(:pipeline_succeeded?).and_return(true)
- allow(Dir).to receive(:glob).with("/var/gitlab-registry/*", File::FNM_DOTMATCH).and_return(["/var/gitlab-registry/.", "/var/gitlab-registry/..", "/var/gitlab-registry/sample1"])
- end
-
- it 'moves all necessary files' do
- allow(subject).to receive(:backup_existing_files).and_call_original
- expect(FileUtils).to receive(:mv).with(["/var/gitlab-registry/sample1"], File.join(Gitlab.config.backup.path, "tmp", "registry.#{Time.now.to_i}"))
- subject.restore('registry.tar.gz', 'backup_id')
- end
-
- it 'raises no errors' do
- expect { subject.restore('registry.tar.gz', 'backup_id') }.not_to raise_error
- end
-
- it 'calls tar command with unlink' do
- expect(subject).to receive(:tar).and_return('blabla-tar')
-
- expect(subject).to receive(:run_pipeline!).with(["gzip -cd", %w[blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -]], any_args)
- expect(subject).to receive(:pipeline_succeeded?).and_return(true)
- subject.restore('registry.tar.gz', 'backup_id')
- end
-
- it 'raises an error on failure' do
- expect(subject).to receive(:pipeline_succeeded?).and_return(false)
-
- expect { subject.restore('registry.tar.gz', 'backup_id') }.to raise_error(/Restore operation failed:/)
- end
- end
-
- describe 'folders without permissions' do
- before do
- allow(FileUtils).to receive(:mv).and_raise(Errno::EACCES)
- allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
- allow(subject).to receive(:pipeline_succeeded?).and_return(true)
- end
-
- it 'shows error message' do
- expect(subject).to receive(:access_denied_error).with("/var/gitlab-registry")
- subject.restore('registry.tar.gz', 'backup_id')
- end
- end
-
- describe 'folders that are a mountpoint' do
- before do
- allow(FileUtils).to receive(:mv).and_raise(Errno::EBUSY)
- allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
- allow(subject).to receive(:pipeline_succeeded?).and_return(true)
- end
-
- it 'shows error message' do
- expect(subject).to receive(:resource_busy_error).with("/var/gitlab-registry")
- .and_call_original
-
- expect { subject.restore('registry.tar.gz', 'backup_id') }.to raise_error(/is a mountpoint/)
- end
- end
-
- describe 'with DECOMPRESS_CMD' do
- before do
- stub_env('DECOMPRESS_CMD', 'tee')
- allow(subject).to receive(:pipeline_succeeded?).and_return(true)
- end
-
- it 'passes through tee instead of gzip' do
- expect(subject).to receive(:run_pipeline!).with(['tee', anything], any_args).and_return([[true, true], ''])
-
- expect do
- subject.restore('registry.tar.gz', 'backup_id')
- end.to output(/Using custom DECOMPRESS_CMD 'tee'/).to_stdout
- end
- end
- end
-
- describe '#dump' do
- subject do
- described_class.new(progress, '/var/gitlab-pages', excludes: ['@pages.tmp'], options: backup_options)
- end
-
- before do
- allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
- allow(subject).to receive(:pipeline_succeeded?).and_return(true)
- end
-
- it 'raises no errors' do
- expect { subject.dump('registry.tar.gz', 'backup_id') }.not_to raise_error
- end
-
- it 'excludes tmp dirs from archive' do
- expect(subject).to receive(:tar).and_return('blabla-tar')
-
- expect(subject).to receive(:run_pipeline!).with([%w[blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .], 'gzip -c -1'], any_args)
- subject.dump('registry.tar.gz', 'backup_id')
- end
-
- it 'raises an error on failure' do
- allow(subject).to receive(:run_pipeline!).and_return([[true, true], ''])
- expect(subject).to receive(:pipeline_succeeded?).and_return(false)
-
- expect do
- subject.dump('registry.tar.gz', 'backup_id')
- end.to raise_error(/Failed to create compressed file/)
- end
-
- describe 'with STRATEGY=copy' do
- before do
- stub_env('STRATEGY', 'copy')
- allow(Gitlab.config.backup).to receive(:path) { '/var/gitlab-backup' }
- allow(File).to receive(:realpath).with("/var/gitlab-backup").and_return("/var/gitlab-backup")
- end
-
- it 'excludes tmp dirs from rsync' do
- expect(Gitlab::Popen).to receive(:popen)
- .with(%w[rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup])
- .and_return(['', 0])
-
- subject.dump('registry.tar.gz', 'backup_id')
- end
-
- it 'retries if rsync fails due to vanishing files' do
- expect(Gitlab::Popen).to receive(:popen)
- .with(%w[rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup])
- .and_return(['rsync failed', 24], ['', 0])
-
- expect do
- subject.dump('registry.tar.gz', 'backup_id')
- end.to output(/files vanished during rsync, retrying/).to_stdout
- end
-
- it 'raises an error and outputs an error message if rsync failed' do
- allow(Gitlab::Popen).to receive(:popen)
- .with(%w[rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp /var/gitlab-pages /var/gitlab-backup])
- .and_return(['rsync failed', 1])
-
- expect do
- subject.dump('registry.tar.gz', 'backup_id')
- end.to output(/rsync failed/).to_stdout
- .and raise_error(/Failed to create compressed file/)
- end
- end
-
- describe 'with COMPRESS_CMD' do
- before do
- stub_env('COMPRESS_CMD', 'tee')
- end
-
- it 'passes through tee instead of gzip' do
- expect(subject).to receive(:run_pipeline!).with([anything, 'tee'], any_args)
- expect do
- subject.dump('registry.tar.gz', 'backup_id')
- end.to output(/Using custom COMPRESS_CMD 'tee'/).to_stdout
- end
- end
-
- context 'when GZIP_RSYNCABLE is "yes"' do
- before do
- stub_env('GZIP_RSYNCABLE', 'yes')
- end
-
- it 'gzips the files with rsyncable option' do
- expect(subject).to receive(:run_pipeline!).with([anything, 'gzip --rsyncable -c -1'], any_args)
- subject.dump('registry.tar.gz', 'backup_id')
- end
- end
-
- context 'when GZIP_RSYNCABLE is not set' do
- it 'gzips the files without the rsyncable option' do
- expect(subject).to receive(:run_pipeline!).with([anything, 'gzip -c -1'], any_args)
- subject.dump('registry.tar.gz', 'backup_id')
- end
- end
- end
-
- describe '#exclude_dirs' do
- subject do
- described_class.new(progress, '/var/gitlab-pages', excludes: ['@pages.tmp'], options: backup_options)
- end
-
- it 'prepends a leading dot slash to tar excludes' do
- expect(subject.exclude_dirs(:tar)).to eq(['--exclude=lost+found', '--exclude=./@pages.tmp'])
- end
-
- it 'prepends a leading slash and app_files_dir basename to rsync excludes' do
- expect(subject.exclude_dirs(:rsync)).to eq(['--exclude=lost+found', '--exclude=/gitlab-pages/@pages.tmp'])
- end
- end
-
- describe '#run_pipeline!' do
- subject do
- described_class.new(progress, '/var/gitlab-registry', options: backup_options)
- end
-
- it 'executes an Open3.pipeline for cmd_list' do
- expect(Open3).to receive(:pipeline).with(%w[whew command], %w[another cmd], any_args)
-
- subject.run_pipeline!([%w[whew command], %w[another cmd]])
- end
-
- it 'returns an empty output on success pipeline' do
- expect(subject.run_pipeline!(%w[true true])[1]).to eq('')
- end
-
- it 'returns the stderr for failed pipeline' do
- expect(
- subject.run_pipeline!(['echo OMG: failed command present 1>&2; false', 'true'])[1]
- ).to match(/OMG: failed/)
- end
-
- it 'returns the success status list on success pipeline' do
- expect(
- subject.run_pipeline!(%w[true true])[0]
- ).to eq_statuslist([status_0, status_0])
- end
-
- it 'returns the failed status in status list for failed commands in pipeline' do
- expect(subject.run_pipeline!(%w[false true true])[0]).to eq_statuslist([status_1, status_0, status_0])
- expect(subject.run_pipeline!(%w[true false true])[0]).to eq_statuslist([status_0, status_1, status_0])
- expect(subject.run_pipeline!(%w[false false true])[0]).to eq_statuslist([status_1, status_1, status_0])
- expect(subject.run_pipeline!(%w[false true false])[0]).to eq_statuslist([status_1, status_0, status_1])
- expect(subject.run_pipeline!(%w[false false false])[0]).to eq_statuslist([status_1, status_1, status_1])
- end
- end
-
- describe '#pipeline_succeeded?' do
- subject do
- described_class.new(progress, '/var/gitlab-registry', options: backup_options)
- end
-
- it 'returns true if both tar and gzip succeeeded' do
- expect(
- subject.pipeline_succeeded?(tar_status: status_0, compress_status: status_0, output: 'any_output')
- ).to be_truthy
- end
-
- it 'returns false if gzip failed' do
- expect(
- subject.pipeline_succeeded?(tar_status: status_1, compress_status: status_1, output: 'any_output')
- ).to be_falsey
- end
-
- context 'if gzip succeeded and tar failed non-critically' do
- before do
- allow(subject).to receive(:tar_ignore_non_success?).and_return(true)
- end
-
- it 'returns true' do
- expect(
- subject.pipeline_succeeded?(tar_status: status_1, compress_status: status_0, output: 'any_output')
- ).to be_truthy
- end
- end
-
- context 'if gzip succeeded and tar failed in other cases' do
- before do
- allow(subject).to receive(:tar_ignore_non_success?).and_return(false)
- end
-
- it 'returns false' do
- expect(
- subject.pipeline_succeeded?(tar_status: status_1, compress_status: status_0, output: 'any_output')
- ).to be_falsey
- end
- end
- end
-
- describe '#tar_ignore_non_success?' do
- subject do
- described_class.new(progress, '/var/gitlab-registry', options: backup_options)
- end
-
- context 'if `tar` command exits with 1 exitstatus' do
- it 'returns true' do
- expect(
- subject.tar_ignore_non_success?(1, 'any_output')
- ).to be_truthy
- end
-
- it 'outputs a warning' do
- expect do
- subject.tar_ignore_non_success?(1, 'any_output')
- end.to output(/Ignoring tar exit status 1/).to_stdout
- end
- end
-
- context 'if `tar` command exits with 2 exitstatus with non-critical warning' do
- before do
- allow(subject).to receive(:noncritical_warning?).and_return(true)
- end
-
- it 'returns true' do
- expect(
- subject.tar_ignore_non_success?(2, 'any_output')
- ).to be_truthy
- end
-
- it 'outputs a warning' do
- expect do
- subject.tar_ignore_non_success?(2, 'any_output')
- end.to output(/Ignoring non-success exit status/).to_stdout
- end
- end
-
- context 'if `tar` command exits with any other unlisted error' do
- before do
- allow(subject).to receive(:noncritical_warning?).and_return(false)
- end
-
- it 'returns false' do
- expect(
- subject.tar_ignore_non_success?(2, 'any_output')
- ).to be_falsey
- end
- end
- end
-
- describe '#noncritical_warning?' do
- subject do
- described_class.new(progress, '/var/gitlab-registry', options: backup_options)
- end
-
- it 'returns true if given text matches noncritical warnings list' do
- expect(
- subject.noncritical_warning?('tar: .: Cannot mkdir: No such file or directory')
- ).to be_truthy
-
- expect(
- subject.noncritical_warning?('gtar: .: Cannot mkdir: No such file or directory')
- ).to be_truthy
- end
-
- it 'returns false otherwize' do
- expect(
- subject.noncritical_warning?('unknown message')
- ).to be_falsey
- end
- end
-end
diff --git a/spec/lib/backup/gitaly_backup_spec.rb b/spec/lib/backup/gitaly_backup_spec.rb
index 058c7f12f63..f063462929d 100644
--- a/spec/lib/backup/gitaly_backup_spec.rb
+++ b/spec/lib/backup/gitaly_backup_spec.rb
@@ -10,9 +10,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
let(:server_side) { false }
let(:progress) do
- Tempfile.new('progress').tap do |progress|
- progress.unlink
- end
+ Tempfile.new('progress').tap(&:unlink)
end
let(:expected_env) do
@@ -62,8 +60,8 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
subject.finish!
expect(File).to exist(File.join(destination, project.disk_path, backup_id, '001.bundle'))
- expect(File).to exist(File.join(destination, project.disk_path + '.wiki', backup_id, '001.bundle'))
- expect(File).to exist(File.join(destination, project.disk_path + '.design', backup_id, '001.bundle'))
+ expect(File).to exist(File.join(destination, "#{project.disk_path}.wiki", backup_id, '001.bundle'))
+ expect(File).to exist(File.join(destination, "#{project.disk_path}.design", backup_id, '001.bundle'))
expect(File).to exist(File.join(destination, personal_snippet.disk_path, backup_id, '001.bundle'))
expect(File).to exist(File.join(destination, project_snippet.disk_path, backup_id, '001.bundle'))
end
@@ -189,7 +187,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
custom_hooks_path = '#{repo.relative_path}.custom_hooks.tar'
TOML
- File.write(File.join(repo_backup_root, 'manifests', repo.storage, repo.relative_path, backup_id + '.toml'), manifest)
+ File.write(File.join(repo_backup_root, 'manifests', repo.storage, repo.relative_path, "#{backup_id}.toml"), manifest)
end
it 'restores from repository bundles', :aggregate_failures do
@@ -209,7 +207,7 @@ RSpec.describe Backup::GitalyBackup, feature_category: :backup_restore do
subject.enqueue(project_snippet, Gitlab::GlRepository::SNIPPET)
subject.finish!
- collect_commit_shas = -> (repo) { repo.commits('master', limit: 10).map(&:sha) }
+ collect_commit_shas = ->(repo) { repo.commits('master', limit: 10).map(&:sha) }
expect(collect_commit_shas.call(project.repository)).to match_array(['393a7d860a5a4c3cc736d7eb00604e3472bb95ec'])
expect(collect_commit_shas.call(project.wiki.repository)).to match_array(['c74b9948d0088d703ee1fafeddd9ed9add2901ea'])
diff --git a/spec/lib/backup/manager_spec.rb b/spec/lib/backup/manager_spec.rb
index 7a8cffe8529..99a484c7a6f 100644
--- a/spec/lib/backup/manager_spec.rb
+++ b/spec/lib/backup/manager_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
let(:progress) { StringIO.new }
let(:definitions) { nil }
+ let(:options) { build(:backup_options, :skip_none) }
subject { described_class.new(progress, definitions: definitions) }
@@ -22,32 +23,29 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
end
describe '#run_create_task' do
- let(:enabled) { true }
- let(:task) { instance_double(Backup::Task) }
+ let(:terraform_state) do
+ Backup::Tasks::TerraformState.new(progress: progress, options: options)
+ .tap { |state| allow(state).to receive(:target).and_return(target) }
+ end
+
+ let(:target) { instance_double(Backup::Targets::Target) }
let(:definitions) do
- {
- 'terraform_state' => Backup::Manager::TaskDefinition.new(
- task: task,
- enabled: enabled,
- destination_path: 'terraform_state.tar.gz',
- human_name: 'terraform state'
- )
- }
+ { 'terraform_state' => terraform_state }
end
it 'calls the named task' do
- expect(task).to receive(:dump)
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping terraform state ... ')
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping terraform state ... done')
+ expect(target).to receive(:dump)
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping terraform states ... ')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping terraform states ... done')
subject.run_create_task('terraform_state')
end
describe 'disabled' do
- let(:enabled) { false }
-
it 'informs the user' do
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping terraform state ... [DISABLED]')
+ allow(terraform_state).to receive(:enabled).and_return(false)
+
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping terraform states ... [DISABLED]')
subject.run_create_task('terraform_state')
end
@@ -57,7 +55,7 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
it 'informs the user' do
stub_env('SKIP', 'terraform_state')
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping terraform state ... [SKIPPED]')
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Dumping terraform states ... [SKIPPED]')
subject.run_create_task('terraform_state')
end
@@ -65,17 +63,22 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
end
describe '#run_restore_task' do
- let(:enabled) { true }
- let(:pre_restore_warning) { nil }
- let(:post_restore_warning) { nil }
- let(:definitions) { { 'terraform_state' => Backup::Manager::TaskDefinition.new(task: task, enabled: enabled, human_name: 'terraform state', destination_path: 'terraform_state.tar.gz') } }
- let(:backup_information) { { backup_created_at: Time.zone.parse('2019-01-01'), gitlab_version: '12.3' } }
- let(:task) do
- instance_double(Backup::Task,
- pre_restore_warning: pre_restore_warning,
- post_restore_warning: post_restore_warning)
+ let(:terraform_state) do
+ Backup::Tasks::TerraformState.new(progress: progress, options: options)
+ .tap { |task| allow(task).to receive(:target).and_return(target) }
end
+ let(:pre_restore_warning) { '' }
+ let(:post_restore_warning) { '' }
+ let(:target) do
+ instance_double(::Backup::Targets::Target,
+ pre_restore_warning: pre_restore_warning,
+ post_restore_warning: post_restore_warning)
+ end
+
+ let(:definitions) { { 'terraform_state' => terraform_state } }
+ let(:backup_information) { { backup_created_at: Time.zone.parse('2019-01-01'), gitlab_version: '12.3' } }
+
before do
allow_next_instance_of(Backup::Metadata) do |metadata|
allow(metadata).to receive(:load_from_file).and_return(backup_information)
@@ -83,18 +86,17 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
end
it 'calls the named task' do
- expect(task).to receive(:restore)
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform state ... ').ordered
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform state ... done').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform states ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform states ... done').ordered
+ expect(target).to receive(:restore)
subject.run_restore_task('terraform_state')
end
describe 'disabled' do
- let(:enabled) { false }
-
it 'informs the user' do
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform state ... [DISABLED]').ordered
+ allow(terraform_state).to receive(:enabled).and_return(false)
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform states ... [DISABLED]').ordered
subject.run_restore_task('terraform_state')
end
@@ -104,17 +106,17 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
let(:pre_restore_warning) { 'Watch out!' }
it 'displays and waits for the user' do
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform state ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform states ... ').ordered
expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform state ... done').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform states ... done').ordered
expect(Gitlab::TaskHelpers).to receive(:ask_to_continue)
- expect(task).to receive(:restore)
+ expect(target).to receive(:restore)
subject.run_restore_task('terraform_state')
end
it 'does not continue when the user quits' do
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform state ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform states ... ').ordered
expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered
expect(Gitlab::TaskHelpers).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)
@@ -129,21 +131,21 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
let(:post_restore_warning) { 'Watch out!' }
it 'displays and waits for the user' do
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform state ... ').ordered
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform state ... done').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform states ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform states ... done').ordered
expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
expect(Gitlab::TaskHelpers).to receive(:ask_to_continue)
- expect(task).to receive(:restore)
+ expect(target).to receive(:restore)
subject.run_restore_task('terraform_state')
end
it 'does not continue when the user quits' do
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform state ... ').ordered
- expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform state ... done').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform states ... ').ordered
+ expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Restoring terraform states ... done').ordered
expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Watch out!').ordered
expect(Gitlab::BackupLogger).to receive(:info).with(message: 'Quitting...').ordered
- expect(task).to receive(:restore)
+ expect(target).to receive(:restore)
expect(Gitlab::TaskHelpers).to receive(:ask_to_continue).and_raise(Gitlab::TaskAbortedByUserError)
expect do
@@ -163,13 +165,20 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
let(:pack_tar_system_options) { { out: [pack_tar_file, 'w', Gitlab.config.backup.archive_permissions] } }
let(:pack_tar_cmdline) { ['tar', '-cf', '-', *expected_backup_contents, pack_tar_system_options] }
- let(:task1) { instance_double(Backup::Task) }
- let(:task2) { instance_double(Backup::Task) }
+ let(:lfs) do
+ Backup::Tasks::Lfs.new(progress: progress, options: options)
+ .tap { |task| allow(task).to receive(:target).and_return(target1) }
+ end
+
+ let(:pages) do
+ Backup::Tasks::Pages.new(progress: progress, options: options)
+ .tap { |task| allow(task).to receive(:target).and_return(target2) }
+ end
+
+ let(:target1) { instance_double(Backup::Targets::Target) }
+ let(:target2) { instance_double(Backup::Targets::Target) }
let(:definitions) do
- {
- 'lfs' => Backup::Manager::TaskDefinition.new(task: task1, human_name: 'lfs objects', destination_path: 'lfs.tar.gz'),
- 'pages' => Backup::Manager::TaskDefinition.new(task: task2, human_name: 'pages', destination_path: 'pages.tar.gz')
- }
+ { 'lfs' => lfs, 'pages' => pages }
end
before do
@@ -178,8 +187,8 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
allow(Gitlab::BackupLogger).to receive(:info)
allow(Kernel).to receive(:system).and_return(true)
- allow(task1).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'lfs.tar.gz'), backup_id)
- allow(task2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'pages.tar.gz'), backup_id)
+ allow(target1).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'lfs.tar.gz'), backup_id)
+ allow(target2).to receive(:dump).with(File.join(Gitlab.config.backup.path, 'pages.tar.gz'), backup_id)
end
it 'creates a backup tar' do
@@ -237,11 +246,11 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
context 'when the destination is optional' do
let(:expected_backup_contents) { %w[backup_information.yml lfs.tar.gz] }
- let(:definitions) do
- {
- 'lfs' => Backup::Manager::TaskDefinition.new(task: task1, destination_path: 'lfs.tar.gz'),
- 'pages' => Backup::Manager::TaskDefinition.new(task: task2, destination_path: 'pages.tar.gz', destination_optional: true)
- }
+ let(:pages) do
+ Backup::Tasks::Pages.new(progress: progress, options: options)
+ .tap do |task|
+ allow(task).to receive_messages(target: target2, destination_optional: true)
+ end
end
it 'executes tar' do
@@ -255,17 +264,17 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
context 'many backup files' do
let(:files) do
- [
- '1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar',
- '1451510000_2015_12_30_gitlab_backup.tar',
- '1450742400_2015_12_22_gitlab_backup.tar',
- '1449878400_gitlab_backup.tar',
- '1449014400_gitlab_backup.tar',
- 'manual_gitlab_backup.tar'
+ %w[
+ 1451606400_2016_01_01_1.2.3_gitlab_backup.tar
+ 1451520000_2015_12_31_4.5.6_gitlab_backup.tar
+ 1451520000_2015_12_31_4.5.6-pre_gitlab_backup.tar
+ 1451520000_2015_12_31_4.5.6-rc1_gitlab_backup.tar
+ 1451520000_2015_12_31_4.5.6-pre-ee_gitlab_backup.tar
+ 1451510000_2015_12_30_gitlab_backup.tar
+ 1450742400_2015_12_22_gitlab_backup.tar
+ 1449878400_gitlab_backup.tar
+ 1449014400_gitlab_backup.tar
+ manual_gitlab_backup.tar
]
end
@@ -295,10 +304,10 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
context 'when no valid file is found' do
let(:files) do
- [
- '14516064000_2016_01_01_1.2.3_gitlab_backup.tar',
- 'foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar',
- '1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar'
+ %w[
+ 14516064000_2016_01_01_1.2.3_gitlab_backup.tar
+ foo_1451520000_2015_12_31_4.5.6_gitlab_backup.tar
+ 1451520000_2015_12_31_4.5.6-foo_gitlab_backup.tar
]
end
@@ -654,9 +663,9 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
context 'when there are two backup files in the directory and BACKUP variable is not set' do
before do
allow(Dir).to receive(:glob).and_return(
- [
- '1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
- '1451520000_2015_12_31_gitlab_backup.tar'
+ %w[
+ 1451606400_2016_01_01_1.2.3_gitlab_backup.tar
+ 1451520000_2015_12_31_gitlab_backup.tar
]
)
end
@@ -923,13 +932,20 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
end
describe '#restore' do
- let(:task1) { instance_double(Backup::Task, pre_restore_warning: nil, post_restore_warning: nil) }
- let(:task2) { instance_double(Backup::Task, pre_restore_warning: nil, post_restore_warning: nil) }
+ let(:lfs) do
+ Backup::Tasks::Lfs.new(progress: progress, options: options)
+ .tap { |task| allow(task).to receive(:target).and_return(target1) }
+ end
+
+ let(:pages) do
+ Backup::Tasks::Pages.new(progress: progress, options: options)
+ .tap { |task| allow(task).to receive(:target).and_return(target2) }
+ end
+
+ let(:target1) { instance_double(Backup::Targets::Target, pre_restore_warning: nil, post_restore_warning: nil) }
+ let(:target2) { instance_double(Backup::Targets::Target, pre_restore_warning: nil, post_restore_warning: nil) }
let(:definitions) do
- {
- 'lfs' => Backup::Manager::TaskDefinition.new(task: task1, human_name: 'lfs content', destination_path: 'lfs.tar.gz'),
- 'pages' => Backup::Manager::TaskDefinition.new(task: task2, human_name: 'pages', destination_path: 'pages.tar.gz')
- }
+ { 'lfs' => lfs, 'pages' => pages }
end
let(:gitlab_version) { Gitlab::VERSION }
@@ -947,8 +963,8 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
Rake.application.rake_require 'tasks/cache'
allow(Gitlab::BackupLogger).to receive(:info)
- allow(task1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'lfs.tar.gz'), backup_id)
- allow(task2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'pages.tar.gz'), backup_id)
+ allow(target1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'lfs.tar.gz'), backup_id)
+ allow(target2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'pages.tar.gz'), backup_id)
allow_next_instance_of(Backup::Metadata) do |metadata|
allow(metadata).to receive(:load_from_file).and_return(backup_information)
end
@@ -971,9 +987,9 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
context 'when there are two backup files in the directory and BACKUP variable is not set' do
before do
allow(Dir).to receive(:glob).and_return(
- [
- '1451606400_2016_01_01_1.2.3_gitlab_backup.tar',
- '1451520000_2015_12_31_gitlab_backup.tar'
+ %w[
+ 1451606400_2016_01_01_1.2.3_gitlab_backup.tar
+ 1451520000_2015_12_31_gitlab_backup.tar
]
)
end
@@ -1047,8 +1063,8 @@ RSpec.describe Backup::Manager, feature_category: :backup_restore do
end
it 'unpacks the BACKUP specified file but uses the backup information backup ID' do
- expect(task1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'lfs.tar.gz'), backup_id)
- expect(task2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'pages.tar.gz'), backup_id)
+ expect(target1).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'lfs.tar.gz'), backup_id)
+ expect(target2).to receive(:restore).with(File.join(Gitlab.config.backup.path, 'pages.tar.gz'), backup_id)
subject.restore
diff --git a/spec/lib/backup/options_spec.rb b/spec/lib/backup/options_spec.rb
index 970eea134dd..0ef10079be0 100644
--- a/spec/lib/backup/options_spec.rb
+++ b/spec/lib/backup/options_spec.rb
@@ -272,4 +272,20 @@ RSpec.describe Backup::Options, feature_category: :backup_restore do
end
end
end
+
+ describe '#skip_task?' do
+ tasks = %w[db uploads builds artifacts lfs terraform_state registry pages repositories packages ci_secure_files]
+
+ tasks.each do |task_name|
+ it "returns true when task #{task_name} is skipped" do
+ options.skippable_tasks[task_name] = true
+
+ expect(options.skip_task?(task_name)).to be(true)
+ end
+
+ it "returns false when task #{task_name} has default skip behavior" do
+ expect(options.skip_task?(task_name)).to be(false)
+ end
+ end
+ end
end
diff --git a/spec/lib/backup/database_spec.rb b/spec/lib/backup/targets/database_spec.rb
index 7e023fda830..204ce62e32f 100644
--- a/spec/lib/backup/database_spec.rb
+++ b/spec/lib/backup/targets/database_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Backup::Database, :reestablished_active_record_base, feature_category: :backup_restore do
+RSpec.describe Backup::Targets::Database, :reestablished_active_record_base, feature_category: :backup_restore do
let(:progress) { StringIO.new }
let(:progress_output) { progress.string }
let(:backup_id) { 'some_id' }
@@ -18,7 +18,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
end
end
- before(:all) do # rubocop:disable RSpec/BeforeAll
+ before_all do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/gitlab/backup'
Rake.application.rake_require 'tasks/gitlab/shell'
@@ -29,11 +29,11 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
describe '#dump', :delete do
let(:force) { true }
- subject { described_class.new(progress, force: force, options: backup_options) }
+ subject(:databases) { described_class.new(progress, force: force, options: backup_options) }
it 'creates gzipped database dumps' do
Dir.mktmpdir do |dir|
- subject.dump(dir, backup_id)
+ databases.dump(dir, backup_id)
base_models_for_backup.each_key do |database_name|
filename = database_name == 'main' ? 'database.sql.gz' : "#{database_name}_database.sql.gz"
@@ -59,7 +59,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(backup_connection).to receive(:release_snapshot!).and_call_original
end
- subject.dump(dir, backup_id)
+ databases.dump(dir, backup_id)
end
end
end
@@ -81,7 +81,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(backup_connection).not_to receive(:release_snapshot!)
end
- subject.dump(dir, backup_id)
+ databases.dump(dir, backup_id)
end
end
end
@@ -98,7 +98,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
.to receive(:new).exactly(number_of_databases).times.and_return(timeout_service)
expect(timeout_service).to receive(:restore_timeouts).exactly(number_of_databases).times
- expect { subject.dump(dir, backup_id) }.to raise_error StandardError
+ expect { databases.dump(dir, backup_id) }.to raise_error StandardError
end
end
end
@@ -111,7 +111,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
it 'will override database.yml configuration' do
# Expect an error because we can't connect to test.invalid.
expect do
- Dir.mktmpdir { |dir| subject.dump(dir, backup_id) }
+ Dir.mktmpdir { |dir| databases.dump(dir, backup_id) }
end.to raise_error(Backup::DatabaseBackupError)
expect do
@@ -129,19 +129,19 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
let(:force) { true }
let(:rake_task) { instance_double(Rake::Task, invoke: true) }
- subject { described_class.new(progress, force: force, options: backup_options) }
+ subject(:databases) { described_class.new(progress, force: force, options: backup_options) }
before do
allow(Rake::Task).to receive(:[]).with(any_args).and_return(rake_task)
- allow(subject).to receive(:pg_restore_cmd).and_return(cmd)
+ allow(databases).to receive(:pg_restore_cmd).and_return(cmd)
end
context 'when not forced' do
let(:force) { false }
it 'warns the user and waits' do
- expect(subject).to receive(:sleep)
+ expect(databases).to receive(:sleep)
if one_database_configured?
expect(Rake::Task['gitlab:db:drop_tables']).to receive(:invoke)
@@ -149,13 +149,13 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
end
- subject.restore(backup_dir, backup_id)
+ databases.restore(backup_dir, backup_id)
expect(progress_output).to include('Removing all tables. Press `Ctrl-C` within 5 seconds to abort')
end
it 'has a pre restore warning' do
- expect(subject.pre_restore_warning).not_to be_nil
+ expect(databases.pre_restore_warning).not_to be_nil
end
end
@@ -167,7 +167,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
end
- subject.restore(backup_dir, backup_id)
+ databases.restore(backup_dir, backup_id)
expect(progress_output).to include("Restoring PostgreSQL database")
expect(progress_output).to include("[DONE]")
@@ -181,7 +181,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
it 'outputs a message about DECOMPRESS_CMD' do
expect do
- subject.restore(backup_dir, backup_id)
+ databases.restore(backup_dir, backup_id)
end.to output(/Using custom DECOMPRESS_CMD 'tee'/).to_stdout
end
end
@@ -189,7 +189,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
context 'with a corrupted .gz file' do
before do
- allow(subject).to receive(:file_name).and_return("#{backup_dir}big-image.png")
+ allow(databases).to receive(:file_name).and_return("#{backup_dir}big-image.png")
end
it 'raises a backup error' do
@@ -199,7 +199,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
end
- expect { subject.restore(backup_dir, backup_id) }.to raise_error(Backup::Error)
+ expect { databases.restore(backup_dir, backup_id) }.to raise_error(Backup::Error)
end
end
@@ -215,17 +215,17 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(Rake::Task['gitlab:db:drop_tables:main']).to receive(:invoke)
end
- subject.restore(backup_dir, backup_id)
+ databases.restore(backup_dir, backup_id)
expect(progress_output).to include("ERRORS")
expect(progress_output).not_to include(noise)
expect(progress_output).to include(visible_error)
- expect(subject.post_restore_warning).not_to be_nil
+ expect(databases.post_restore_warning).not_to be_nil
end
end
context 'with PostgreSQL settings defined in the environment' do
- let(:config) { YAML.load_file(File.join(Rails.root, 'config', 'database.yml'))['test'] }
+ let(:config) { YAML.load_file(Rails.root.join('config/database.yml'))['test'] }
before do
stub_env(ENV.to_h.merge({
@@ -244,7 +244,7 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
expect(ENV).to receive(:merge!).with(hash_including { 'PGHOST' => 'test.example.com' })
expect(ENV).not_to receive(:[]=).with('PGPASSWORD', anything)
- subject.restore(backup_dir, backup_id)
+ databases.restore(backup_dir, backup_id)
expect(ENV['PGPORT']).to eq(config['port']) if config['port']
expect(ENV['PGUSER']).to eq(config['username']) if config['username']
@@ -267,14 +267,14 @@ RSpec.describe Backup::Database, :reestablished_active_record_base, feature_cate
end
expect do
- subject.restore('db', backup_id)
+ databases.restore('db', backup_id)
end.to raise_error(Backup::Error, /Source database file does not exist/)
end
end
context 'for ci database' do
it 'ci database tolerates missing source file' do
- expect { subject.restore(backup_dir, backup_id) }.not_to raise_error
+ expect { databases.restore(backup_dir, backup_id) }.not_to raise_error
end
end
end
diff --git a/spec/lib/backup/targets/files_spec.rb b/spec/lib/backup/targets/files_spec.rb
new file mode 100644
index 00000000000..d4acd13c4cb
--- /dev/null
+++ b/spec/lib/backup/targets/files_spec.rb
@@ -0,0 +1,403 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::Targets::Files, feature_category: :backup_restore do
+ let(:progress) { StringIO.new }
+ let!(:project) { create(:project) }
+ let(:backup_options) { Backup::Options.new }
+ let(:backup_basepath) { Pathname(Gitlab.config.backup.path) }
+
+ let(:status_0) { instance_double(Process::Status, success?: true, exitstatus: 0) }
+ let(:status_1) { instance_double(Process::Status, success?: false, exitstatus: 1) }
+ let(:status_2) { instance_double(Process::Status, success?: false, exitstatus: 2) }
+
+ before do
+ allow(progress).to receive(:puts)
+ allow(progress).to receive(:print)
+ allow(FileUtils).to receive(:mkdir_p).and_return(true)
+ allow(FileUtils).to receive(:mv).and_return(true)
+ allow(File).to receive(:exist?).and_return(true)
+ allow(File).to receive(:realpath).with("/var/gitlab-registry").and_return("/var/gitlab-registry")
+ allow(File).to receive(:realpath).with("/var/gitlab-registry/..").and_return("/var")
+ allow(File).to receive(:realpath).with("/var/gitlab-pages").and_return("/var/gitlab-pages")
+ allow(File).to receive(:realpath).with("/var/gitlab-pages/..").and_return("/var")
+
+ allow_next_instance_of(described_class) do |instance|
+ allow(instance).to receive(:progress).and_return(progress)
+ end
+ end
+
+ RSpec::Matchers.define :eq_statuslist do |expected|
+ match do |actual|
+ actual.map(&:exitstatus) == expected.map(&:exitstatus)
+ end
+
+ description do
+ 'be an Array of Process::Status with equal exitstatus against expected'
+ end
+
+ failure_message do |actual|
+ "expected #{actual} exitstatuses list to be equal #{expected} exitstatuses list"
+ end
+ end
+
+ describe '#restore' do
+ subject(:files) { described_class.new(progress, '/var/gitlab-registry', options: backup_options) }
+
+ let(:timestamp) { Time.utc(2017, 3, 22) }
+
+ around do |example|
+ travel_to(timestamp) { example.run }
+ end
+
+ describe 'folders with permission' do
+ before do
+ allow(files).to receive(:run_pipeline!).and_return([[true, true], ''])
+ allow(files).to receive(:backup_existing_files).and_return(true)
+ allow(files).to receive(:pipeline_succeeded?).and_return(true)
+ found_files = %w[/var/gitlab-registry/. /var/gitlab-registry/.. /var/gitlab-registry/sample1]
+ allow(Dir).to receive(:glob).with("/var/gitlab-registry/*", File::FNM_DOTMATCH).and_return(found_files)
+ end
+
+ it 'moves all necessary files' do
+ allow(files).to receive(:backup_existing_files).and_call_original
+
+ tmp_dir = backup_basepath.join('tmp', "registry.#{Time.now.to_i}")
+ expect(FileUtils).to receive(:mv).with(['/var/gitlab-registry/sample1'], tmp_dir)
+
+ files.restore('registry.tar.gz', 'backup_id')
+ end
+
+ it 'raises no errors' do
+ expect { files.restore('registry.tar.gz', 'backup_id') }.not_to raise_error
+ end
+
+ it 'calls tar command with unlink' do
+ expect(files).to receive(:tar).and_return('blabla-tar')
+
+ expect(files).to receive(:run_pipeline!).with(
+ ['gzip -cd', %w[blabla-tar --unlink-first --recursive-unlink -C /var/gitlab-registry -xf -]],
+ any_args)
+ expect(files).to receive(:pipeline_succeeded?).and_return(true)
+
+ files.restore('registry.tar.gz', 'backup_id')
+ end
+
+ it 'raises an error on failure' do
+ expect(files).to receive(:pipeline_succeeded?).and_return(false)
+
+ expect { files.restore('registry.tar.gz', 'backup_id') }.to raise_error(/Restore operation failed:/)
+ end
+ end
+
+ describe 'folders without permissions' do
+ before do
+ allow(FileUtils).to receive(:mv).and_raise(Errno::EACCES)
+ allow(files).to receive(:run_pipeline!).and_return([[true, true], ''])
+ allow(files).to receive(:pipeline_succeeded?).and_return(true)
+ end
+
+ it 'shows error message' do
+ expect(files).to receive(:access_denied_error).with("/var/gitlab-registry")
+
+ files.restore('registry.tar.gz', 'backup_id')
+ end
+ end
+
+ describe 'folders that are a mountpoint' do
+ before do
+ allow(FileUtils).to receive(:mv).and_raise(Errno::EBUSY)
+ allow(files).to receive(:run_pipeline!).and_return([[true, true], ''])
+ allow(files).to receive(:pipeline_succeeded?).and_return(true)
+ end
+
+ it 'shows error message' do
+ expect(files).to receive(:resource_busy_error).with("/var/gitlab-registry")
+ .and_call_original
+
+ expect { files.restore('registry.tar.gz', 'backup_id') }.to raise_error(/is a mountpoint/)
+ end
+ end
+
+ describe 'with DECOMPRESS_CMD' do
+ before do
+ stub_env('DECOMPRESS_CMD', 'tee')
+ allow(files).to receive(:pipeline_succeeded?).and_return(true)
+ end
+
+ it 'passes through tee instead of gzip' do
+ expect(files).to receive(:run_pipeline!).with(['tee', anything], any_args).and_return([[true, true], ''])
+
+ expect do
+ files.restore('registry.tar.gz', 'backup_id')
+ end.to output(/Using custom DECOMPRESS_CMD 'tee'/).to_stdout
+ end
+ end
+ end
+
+ describe '#dump' do
+ subject(:files) do
+ described_class.new(progress, '/var/gitlab-pages', excludes: ['@pages.tmp'], options: backup_options)
+ end
+
+ before do
+ allow(files).to receive(:run_pipeline!).and_return([[true, true], ''])
+ allow(files).to receive(:pipeline_succeeded?).and_return(true)
+ end
+
+ it 'raises no errors' do
+ expect { files.dump('registry.tar.gz', 'backup_id') }.not_to raise_error
+ end
+
+ it 'excludes tmp dirs from archive' do
+ expect(files).to receive(:tar).and_return('blabla-tar')
+
+ expect(files).to receive(:run_pipeline!).with(
+ [%w[blabla-tar --exclude=lost+found --exclude=./@pages.tmp -C /var/gitlab-pages -cf - .], 'gzip -c -1'],
+ any_args)
+ files.dump('registry.tar.gz', 'backup_id')
+ end
+
+ it 'raises an error on failure' do
+ allow(files).to receive(:run_pipeline!).and_return([[true, true], ''])
+ expect(files).to receive(:pipeline_succeeded?).and_return(false)
+
+ expect do
+ files.dump('registry.tar.gz', 'backup_id')
+ end.to raise_error(/Failed to create compressed file/)
+ end
+
+ describe 'with STRATEGY=copy' do
+ before do
+ stub_env('STRATEGY', 'copy')
+ allow(files).to receive(:backup_basepath).and_return(Pathname('/var/gitlab-backup'))
+ allow(File).to receive(:realpath).with('/var/gitlab-backup').and_return('/var/gitlab-backup')
+ end
+
+ it 'excludes tmp dirs from rsync' do
+ cmd_args = %w[rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp
+ /var/gitlab-pages /var/gitlab-backup]
+ expect(Gitlab::Popen).to receive(:popen).with(cmd_args).and_return(['', 0])
+
+ files.dump('registry.tar.gz', 'backup_id')
+ end
+
+ it 'retries if rsync fails due to vanishing files' do
+ cmd_args = %w[rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp
+ /var/gitlab-pages /var/gitlab-backup]
+ expect(Gitlab::Popen).to receive(:popen).with(cmd_args).and_return(['rsync failed', 24], ['', 0])
+
+ expect do
+ files.dump('registry.tar.gz', 'backup_id')
+ end.to output(/files vanished during rsync, retrying/).to_stdout
+ end
+
+ it 'raises an error and outputs an error message if rsync failed' do
+ cmd_args = %w[rsync -a --delete --exclude=lost+found --exclude=/gitlab-pages/@pages.tmp
+ /var/gitlab-pages /var/gitlab-backup]
+ allow(Gitlab::Popen).to receive(:popen).with(cmd_args).and_return(['rsync failed', 1])
+
+ expect do
+ files.dump('registry.tar.gz', 'backup_id')
+ end.to output(/rsync failed/).to_stdout
+ .and raise_error(/Failed to create compressed file/)
+ end
+ end
+
+ describe 'with COMPRESS_CMD' do
+ before do
+ stub_env('COMPRESS_CMD', 'tee')
+ end
+
+ it 'passes through tee instead of gzip' do
+ expect(files).to receive(:run_pipeline!).with([anything, 'tee'], any_args)
+ expect do
+ files.dump('registry.tar.gz', 'backup_id')
+ end.to output(/Using custom COMPRESS_CMD 'tee'/).to_stdout
+ end
+ end
+
+ context 'when GZIP_RSYNCABLE is "yes"' do
+ before do
+ stub_env('GZIP_RSYNCABLE', 'yes')
+ end
+
+ it 'gzips the files with rsyncable option' do
+ expect(files).to receive(:run_pipeline!).with([anything, 'gzip --rsyncable -c -1'], any_args)
+ files.dump('registry.tar.gz', 'backup_id')
+ end
+ end
+
+ context 'when GZIP_RSYNCABLE is not set' do
+ it 'gzips the files without the rsyncable option' do
+ expect(files).to receive(:run_pipeline!).with([anything, 'gzip -c -1'], any_args)
+ files.dump('registry.tar.gz', 'backup_id')
+ end
+ end
+ end
+
+ describe '#exclude_dirs' do
+ subject(:files) do
+ described_class.new(progress, '/var/gitlab-pages', excludes: ['@pages.tmp'], options: backup_options)
+ end
+
+ it 'prepends a leading dot slash to tar excludes' do
+ expect(files.exclude_dirs(:tar)).to eq(%w[--exclude=lost+found --exclude=./@pages.tmp])
+ end
+
+ it 'prepends a leading slash and app_files_dir basename to rsync excludes' do
+ expect(files.exclude_dirs(:rsync)).to eq(%w[--exclude=lost+found --exclude=/gitlab-pages/@pages.tmp])
+ end
+ end
+
+ describe '#run_pipeline!' do
+ subject(:files) do
+ described_class.new(progress, '/var/gitlab-registry', options: backup_options)
+ end
+
+ it 'executes an Open3.pipeline for cmd_list' do
+ expect(Open3).to receive(:pipeline).with(%w[whew command], %w[another cmd], any_args)
+
+ files.run_pipeline!([%w[whew command], %w[another cmd]])
+ end
+
+ it 'returns an empty output on success pipeline' do
+ expect(files.run_pipeline!(%w[true true])[1]).to eq('')
+ end
+
+ it 'returns the stderr for failed pipeline' do
+ expect(
+ files.run_pipeline!(['echo OMG: failed command present 1>&2; false', 'true'])[1]
+ ).to match(/OMG: failed/)
+ end
+
+ it 'returns the success status list on success pipeline' do
+ expect(
+ files.run_pipeline!(%w[true true])[0]
+ ).to eq_statuslist([status_0, status_0])
+ end
+
+ it 'returns the failed status in status list for failed commands in pipeline' do
+ expect(files.run_pipeline!(%w[false true true])[0]).to eq_statuslist([status_1, status_0, status_0])
+ expect(files.run_pipeline!(%w[true false true])[0]).to eq_statuslist([status_0, status_1, status_0])
+ expect(files.run_pipeline!(%w[false false true])[0]).to eq_statuslist([status_1, status_1, status_0])
+ expect(files.run_pipeline!(%w[false true false])[0]).to eq_statuslist([status_1, status_0, status_1])
+ expect(files.run_pipeline!(%w[false false false])[0]).to eq_statuslist([status_1, status_1, status_1])
+ end
+ end
+
+ describe '#pipeline_succeeded?' do
+ subject(:files) do
+ described_class.new(progress, '/var/gitlab-registry', options: backup_options)
+ end
+
+ it 'returns true if both tar and gzip succeeded' do
+ expect(
+ files.pipeline_succeeded?(tar_status: status_0, compress_status: status_0, output: 'any_output')
+ ).to be_truthy
+ end
+
+ it 'returns false if gzip failed' do
+ expect(
+ files.pipeline_succeeded?(tar_status: status_1, compress_status: status_1, output: 'any_output')
+ ).to be_falsey
+ end
+
+ context 'if gzip succeeded and tar failed non-critically' do
+ before do
+ allow(files).to receive(:tar_ignore_non_success?).and_return(true)
+ end
+
+ it 'returns true' do
+ expect(
+ files.pipeline_succeeded?(tar_status: status_1, compress_status: status_0, output: 'any_output')
+ ).to be_truthy
+ end
+ end
+
+ context 'if gzip succeeded and tar failed in other cases' do
+ before do
+ allow(files).to receive(:tar_ignore_non_success?).and_return(false)
+ end
+
+ it 'returns false' do
+ expect(
+ files.pipeline_succeeded?(tar_status: status_1, compress_status: status_0, output: 'any_output')
+ ).to be_falsey
+ end
+ end
+ end
+
+ describe '#tar_ignore_non_success?' do
+ subject(:files) do
+ described_class.new(progress, '/var/gitlab-registry', options: backup_options)
+ end
+
+ context 'if `tar` command exits with 1 exitstatus' do
+ it 'returns true' do
+ expect(
+ files.tar_ignore_non_success?(1, 'any_output')
+ ).to be_truthy
+ end
+
+ it 'outputs a warning' do
+ expect do
+ files.tar_ignore_non_success?(1, 'any_output')
+ end.to output(/Ignoring tar exit status 1/).to_stdout
+ end
+ end
+
+ context 'if `tar` command exits with 2 exitstatus with non-critical warning' do
+ before do
+ allow(files).to receive(:noncritical_warning?).and_return(true)
+ end
+
+ it 'returns true' do
+ expect(
+ files.tar_ignore_non_success?(2, 'any_output')
+ ).to be_truthy
+ end
+
+ it 'outputs a warning' do
+ expect do
+ files.tar_ignore_non_success?(2, 'any_output')
+ end.to output(/Ignoring non-success exit status/).to_stdout
+ end
+ end
+
+ context 'if `tar` command exits with any other unlisted error' do
+ before do
+ allow(files).to receive(:noncritical_warning?).and_return(false)
+ end
+
+ it 'returns false' do
+ expect(
+ files.tar_ignore_non_success?(2, 'any_output')
+ ).to be_falsey
+ end
+ end
+ end
+
+ describe '#noncritical_warning?' do
+ subject(:files) do
+ described_class.new(progress, '/var/gitlab-registry', options: backup_options)
+ end
+
+ it 'returns true if given text matches noncritical warnings list' do
+ expect(
+ files.noncritical_warning?('tar: .: Cannot mkdir: No such file or directory')
+ ).to be_truthy
+
+ expect(
+ files.noncritical_warning?('gtar: .: Cannot mkdir: No such file or directory')
+ ).to be_truthy
+ end
+
+ it 'returns false otherwise' do
+ expect(
+ files.noncritical_warning?('unknown message')
+ ).to be_falsey
+ end
+ end
+end
diff --git a/spec/lib/backup/repositories_spec.rb b/spec/lib/backup/targets/repositories_spec.rb
index e63d321495e..0f203e114b2 100644
--- a/spec/lib/backup/repositories_spec.rb
+++ b/spec/lib/backup/targets/repositories_spec.rb
@@ -2,9 +2,9 @@
require 'spec_helper'
-RSpec.describe Backup::Repositories, feature_category: :backup_restore do
- let(:progress) { spy(:stdout) }
- let(:strategy) { spy(:strategy) }
+RSpec.describe Backup::Targets::Repositories, feature_category: :backup_restore do
+ let(:progress) { instance_double(StringIO, puts: nil, print: nil) }
+ let(:strategy) { instance_double(Backup::GitalyBackup, start: nil, enqueue: nil, finish!: nil) }
let(:storages) { [] }
let(:paths) { [] }
let(:skip_paths) { [] }
@@ -12,7 +12,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
let(:backup_id) { 'backup_id' }
let(:backup_options) { Backup::Options.new }
- subject do
+ subject(:repositories) do
described_class.new(
progress,
strategy: strategy,
@@ -31,25 +31,26 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
project_snippet = create(:project_snippet, :repository, project: project)
personal_snippet = create(:personal_snippet, :repository, author: project.first_owner)
- subject.dump(destination, backup_id)
+ repositories.dump(destination, backup_id)
expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:enqueue).with(project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:finish!)
end
end
- context 'hashed storage' do
+ context 'with hashed storage' do
let_it_be(:project) { create(:project_with_design, :repository) }
it_behaves_like 'creates repository bundles'
end
- context 'legacy storage' do
+ context 'with legacy storage' do
let_it_be(:project) { create(:project_with_design, :repository, :legacy_storage) }
it_behaves_like 'creates repository bundles'
@@ -59,19 +60,19 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
it 'enqueue_project raises an error' do
allow(strategy).to receive(:enqueue).with(anything, Gitlab::GlRepository::PROJECT).and_raise(IOError)
- expect { subject.dump(destination, backup_id) }.to raise_error(IOError)
+ expect { repositories.dump(destination, backup_id) }.to raise_error(IOError)
end
it 'project query raises an error' do
allow(Project).to receive_message_chain(:includes, :find_each).and_raise(ActiveRecord::StatementTimeout)
- expect { subject.dump(destination, backup_id) }.to raise_error(ActiveRecord::StatementTimeout)
+ expect { repositories.dump(destination, backup_id) }.to raise_error(ActiveRecord::StatementTimeout)
end
end
it 'avoids N+1 database queries' do
control = ActiveRecord::QueryRecorder.new do
- subject.dump(destination, backup_id)
+ repositories.dump(destination, backup_id)
end
create_list(:project, 2, :repository)
@@ -82,7 +83,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
# for each project.
# We are using 2 projects here.
expect do
- subject.dump(destination, backup_id)
+ repositories.dump(destination, backup_id)
end.not_to exceed_query_limit(control).with_threshold(2)
end
@@ -102,7 +103,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
excluded_personal_snippet.track_snippet_repository('test_second_storage')
- subject.dump(destination, backup_id)
+ repositories.dump(destination, backup_id)
expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
@@ -110,7 +111,8 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
@@ -118,7 +120,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
describe 'paths' do
let_it_be(:project) { create(:project_with_design, :repository) }
- context 'project path' do
+ context 'with a project path' do
let(:paths) { [project.full_path] }
it 'calls enqueue for all repositories on the specified project', :aggregate_failures do
@@ -126,7 +128,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.dump(destination, backup_id)
+ repositories.dump(destination, backup_id)
expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
@@ -134,12 +136,13 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
- context 'group path' do
+ context 'with a group path' do
let(:paths) { [project.namespace.full_path] }
it 'calls enqueue for all repositories on all descendant projects', :aggregate_failures do
@@ -147,7 +150,7 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.dump(destination, backup_id)
+ repositories.dump(destination, backup_id)
expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
@@ -155,7 +158,8 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
@@ -165,14 +169,14 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
let_it_be(:project) { create(:project_with_design, :repository) }
let_it_be(:excluded_project) { create(:project, :repository) }
- context 'project path' do
+ context 'with a project path' do
let(:skip_paths) { [excluded_project.full_path] }
it 'calls enqueue for all repositories on the specified project', :aggregate_failures do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
included_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.dump(destination, backup_id)
+ repositories.dump(destination, backup_id)
expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
@@ -180,19 +184,20 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
- context 'group path' do
+ context 'with a group path' do
let(:skip_paths) { [excluded_project.namespace.full_path] }
it 'calls enqueue for all repositories on all descendant projects', :aggregate_failures do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
included_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.dump(destination, backup_id)
+ repositories.dump(destination, backup_id)
expect(strategy).to have_received(:start).with(:create, destination, backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
@@ -200,7 +205,8 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
@@ -214,23 +220,25 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
let_it_be(:project_snippet) { create(:project_snippet, :repository, project: project, author: project.first_owner) }
it 'calls enqueue for each repository type', :aggregate_failures do
- subject.restore(destination, backup_id)
+ repositories.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default], backup_id: backup_id)
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default],
+ backup_id: backup_id)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:enqueue).with(project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:finish!)
end
- context 'restoring object pools' do
+ context 'when restoring object pools' do
it 'schedules restoring of the pool', :sidekiq_might_not_need_inline do
pool_repository = create(:pool_repository, :failed)
pool_repository.delete_object_pool
- subject.restore(destination, backup_id)
+ repositories.restore(destination, backup_id)
pool_repository.reload
expect(pool_repository).not_to be_failed
@@ -241,14 +249,14 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
pool_repository = create(:pool_repository, state: :obsolete)
pool_repository.update_column(:source_project_id, nil)
- subject.restore(destination, backup_id)
+ repositories.restore(destination, backup_id)
pool_repository.reload
expect(pool_repository).to be_obsolete
end
end
- context 'storages' do
+ context 'for storages' do
let(:storages) { %w[default] }
before do
@@ -262,21 +270,23 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
excluded_personal_snippet.track_snippet_repository('test_second_storage')
- subject.restore(destination, backup_id)
+ repositories.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default], backup_id: backup_id)
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default],
+ backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
- context 'paths' do
- context 'project path' do
+ context 'for paths' do
+ context 'with a project path' do
let(:paths) { [project.full_path] }
it 'calls enqueue for all repositories on the specified project', :aggregate_failures do
@@ -284,20 +294,22 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.restore(destination, backup_id)
+ repositories.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: nil, backup_id: backup_id)
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: nil,
+ backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
- context 'group path' do
+ context 'with a group path' do
let(:paths) { [project.namespace.full_path] }
it 'calls enqueue for all repositories on all descendant projects', :aggregate_failures do
@@ -305,59 +317,65 @@ RSpec.describe Backup::Repositories, feature_category: :backup_restore do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
excluded_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.restore(destination, backup_id)
+ repositories.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: nil, backup_id: backup_id)
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: nil,
+ backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).not_to have_received(:enqueue).with(excluded_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
end
- context 'skip_paths' do
+ context 'for skip_paths' do
let_it_be(:excluded_project) { create(:project, :repository) }
- context 'project path' do
+ context 'with a project path' do
let(:skip_paths) { [excluded_project.full_path] }
it 'calls enqueue for all repositories on the specified project', :aggregate_failures do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
included_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.restore(destination, backup_id)
+ repositories.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default], backup_id: backup_id)
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default],
+ backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
- context 'group path' do
+ context 'with a group path' do
let(:skip_paths) { [excluded_project.namespace.full_path] }
it 'calls enqueue for all repositories on all descendant projects', :aggregate_failures do
excluded_project_snippet = create(:project_snippet, :repository, project: excluded_project)
included_personal_snippet = create(:personal_snippet, :repository, author: excluded_project.first_owner)
- subject.restore(destination, backup_id)
+ repositories.restore(destination, backup_id)
- expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default], backup_id: backup_id)
+ expect(strategy).to have_received(:start).with(:restore, destination, remove_all_repositories: %w[default],
+ backup_id: backup_id)
expect(strategy).not_to have_received(:enqueue).with(excluded_project, Gitlab::GlRepository::PROJECT)
expect(strategy).not_to have_received(:enqueue).with(excluded_project_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(included_personal_snippet, Gitlab::GlRepository::SNIPPET)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::PROJECT)
expect(strategy).to have_received(:enqueue).with(project, Gitlab::GlRepository::WIKI)
- expect(strategy).to have_received(:enqueue).with(project.design_management_repository, Gitlab::GlRepository::DESIGN)
+ expect(strategy).to have_received(:enqueue).with(project.design_management_repository,
+ Gitlab::GlRepository::DESIGN)
expect(strategy).to have_received(:finish!)
end
end
diff --git a/spec/lib/backup/targets/target_spec.rb b/spec/lib/backup/targets/target_spec.rb
new file mode 100644
index 00000000000..f69d95fb382
--- /dev/null
+++ b/spec/lib/backup/targets/target_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Backup::Targets::Target, feature_category: :backup_restore do
+ let(:progress) { StringIO.new }
+ let(:backup_options) { build(:backup_options) }
+
+ subject(:target) { described_class.new(progress, options: backup_options) }
+
+ describe '#options' do
+ it 'has an accessor for Backup::Options' do
+ expect(target.options).to be_a(Backup::Options)
+ end
+ end
+
+ describe '#dump' do
+ it 'must be implemented by the subclass' do
+ expect { target.dump('some/path', 'backup_id') }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#restore' do
+ it 'must be implemented by the subclass' do
+ expect { target.restore('some/path', 'backup_id') }.to raise_error(NotImplementedError)
+ end
+ end
+
+ describe '#pre_restore_warning' do
+ it { is_expected.to respond_to(:pre_restore_warning) }
+ end
+
+ describe '#pos_restore_warning' do
+ it { is_expected.to respond_to(:pos_restore_warning) }
+ end
+end
diff --git a/spec/lib/backup/task_spec.rb b/spec/lib/backup/task_spec.rb
deleted file mode 100644
index 5ded16cd52b..00000000000
--- a/spec/lib/backup/task_spec.rb
+++ /dev/null
@@ -1,22 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Backup::Task, feature_category: :backup_restore do
- let(:progress) { StringIO.new }
- let(:backup_options) { build(:backup_options) }
-
- subject { described_class.new(progress, options: backup_options) }
-
- describe '#dump' do
- it 'must be implemented by the subclass' do
- expect { subject.dump('some/path', 'backup_id') }.to raise_error(NotImplementedError)
- end
- end
-
- describe '#restore' do
- it 'must be implemented by the subclass' do
- expect { subject.restore('some/path', 'backup_id') }.to raise_error(NotImplementedError)
- end
- end
-end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index e873a59b54a..a098ac2a4ab 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -877,6 +877,37 @@ eos
end
end
+ describe '#merged_merge_request' do
+ subject { commit.merged_merge_request(user) }
+
+ let(:user) { project.first_owner }
+
+ before do
+ allow(commit).to receive(:parent_ids).and_return(parent_ids)
+ end
+
+ context 'when commit is a merge commit' do
+ let!(:merge_request) { create(:merge_request, source_project: project, merge_commit_sha: commit.id) }
+ let(:parent_ids) { [1, 2] }
+
+ it { is_expected.to eq(merge_request) }
+ end
+
+ context 'when commit is a squash commit' do
+ let!(:merge_request) { create(:merge_request, source_project: project, squash_commit_sha: commit.id) }
+ let(:parent_ids) { [1] }
+
+ it { is_expected.to eq(merge_request) }
+ end
+
+ context 'when commit does not belong to the merge request' do
+ let!(:merge_request) { create(:merge_request, source_project: project) }
+ let(:parent_ids) { [1] }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
describe '#tipping_refs' do
let_it_be(:tag_name) { 'v1.1.0' }
let_it_be(:branch_names) { %w[master not-merged-branch v1.1.0] }
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 797ab5be235..cd35aeef890 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -258,6 +258,101 @@ RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_rev
end
end
+ describe '#squash_commit' do
+ subject { merge_request.squash_commit }
+
+ let(:merge_request) { build(:merge_request, target_project: project, squash: true, squash_commit_sha: sha) }
+ let(:commit) { project.repository.commit }
+
+ context 'when a commit is present in the repository' do
+ let(:sha) { commit.sha }
+
+ it { is_expected.to eq(commit) }
+ end
+
+ context 'when a commit is not found' do
+ let(:sha) { 'abc123' }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '#commit_to_revert' do
+ subject { merge_request.commit_to_revert }
+
+ context 'when a merge request is not merged' do
+ let(:merge_request) { build(:merge_request, :opened, target_project: project) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when a merge request is merged' do
+ let_it_be(:commit) { project.repository.commit }
+
+ let(:merge_request) do
+ build(
+ :merge_request,
+ :merged,
+ target_project: project,
+ merge_commit_sha: merge_commit_sha,
+ squash_commit_sha: squash_commit_sha
+ )
+ end
+
+ let(:merge_commit_sha) { nil }
+ let(:squash_commit_sha) { nil }
+
+ context 'when merge request has a merge commit' do
+ let(:merge_commit_sha) { commit.sha }
+
+ it { is_expected.to eq(commit) }
+ end
+
+ context 'when merge request has a squash commit' do
+ let(:squash_commit_sha) { commit.sha }
+
+ it { is_expected.to eq(commit) }
+ end
+
+ context 'when merge request does not have merge and squash commits' do
+ let(:merge_request) { create(:merge_request, :merged, target_project: project) }
+ let(:merge_request_diff) { create(:merge_request_diff, merge_request: merge_request, head_commit_sha: commit.sha) }
+
+ context 'when the diff has only one commit' do
+ before do
+ create(:merge_request_diff_commit, merge_request_diff: merge_request_diff, sha: 'abc123')
+ merge_request_diff.save_git_content
+ end
+
+ it { is_expected.to eq(commit) }
+ end
+
+ context 'when the diff has more than one commit' do
+ before do
+ create(:merge_request_diff_commit, merge_request_diff: merge_request_diff, sha: 'abc456')
+ create(:merge_request_diff_commit, merge_request_diff: merge_request_diff, sha: 'abc123', relative_order: 1)
+ merge_request_diff.save_git_content
+ end
+
+ it { is_expected.to be_nil }
+ end
+ end
+ end
+ end
+
+ describe '#commit_to_cherry_pick' do
+ subject { merge_request.commit_to_cherry_pick }
+
+ let(:merge_request) { build(:merge_request) }
+ let(:commit_to_revert_result) { double }
+
+ it 'delegates the call to #commit_to_revert' do
+ expect(merge_request).to receive(:commit_to_revert).and_return(commit_to_revert_result)
+
+ is_expected.to eq(commit_to_revert_result)
+ end
+ end
+
describe '#default_squash_commit_message' do
let(:project) { subject.project }
let(:is_multiline) { -> (c) { c.description.present? } }
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index 532492c9c2c..f555f39ff74 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -20,18 +20,11 @@ RSpec.describe API::ImportGithub, feature_category: :importers do
}
end
- let(:headers) do
- {
- 'x-oauth-scopes' => 'read:org'
- }
- end
-
let(:client) { double('client', user: provider_user, repository: provider_repo) }
before do
Grape::Endpoint.before_each do |endpoint|
allow(endpoint).to receive(:client).and_return(client)
- allow(client).to receive_message_chain(:octokit, :last_response, :headers).and_return(headers)
end
end
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index 949031b961a..06ce00260ed 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -21,19 +21,12 @@ RSpec.describe Import::GithubService, feature_category: :importers do
}
end
- let(:headers) do
- {
- 'x-oauth-scopes' => 'read:org'
- }
- end
-
let(:client) { Gitlab::GithubImport::Client.new(token) }
let(:project_double) { instance_double(Project, persisted?: true) }
subject(:github_importer) { described_class.new(client, user, params) }
before do
- allow(client).to receive_message_chain(:octokit, :last_response, :headers).and_return(headers)
allow(Gitlab::GithubImport::Settings).to receive(:new).with(project_double).and_return(settings)
allow(settings)
.to receive(:write)
@@ -202,42 +195,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
end
end
- context 'validates minimum scope when collaborator import is false' do
- let(:optional_stages) do
- {
- collaborators_import: false
- }
- end
-
- let(:headers) do
- {
- 'x-oauth-scopes' => 'write:packages'
- }
- end
-
- it 'returns error when scope is not adequate' do
- expect(subject.execute(access_params, :github)).to include(minimum_scope_error)
- end
- end
-
- context 'validates collaborator scopes when collaborator import is true' do
- let(:optional_stages) do
- {
- collaborators_import: true
- }
- end
-
- let(:headers) do
- {
- 'x-oauth-scopes' => 'repo, read:user'
- }
- end
-
- it 'returns error when scope is not adequate' do
- expect(subject.execute(access_params, :github)).to include(collab_import_scope_error)
- end
- end
-
context 'when timeout strategy param is present' do
let(:timeout_strategy) { 'pessimistic' }
@@ -373,22 +330,6 @@ RSpec.describe Import::GithubService, feature_category: :importers do
}
end
- def minimum_scope_error
- {
- status: :error,
- http_status: :unprocessable_entity,
- message: 'Your GitHub access token does not have the correct scope to import.'
- }
- end
-
- def collab_import_scope_error
- {
- status: :error,
- http_status: :unprocessable_entity,
- message: 'Your GitHub access token does not have the correct scope to import collaborators.'
- }
- end
-
def blocked_url_error(url)
{
status: :error,
diff --git a/spec/support/helpers/database/duplicate_indexes.yml b/spec/support/helpers/database/duplicate_indexes.yml
index 941ea4305c5..609b018b0cd 100644
--- a/spec/support/helpers/database/duplicate_indexes.yml
+++ b/spec/support/helpers/database/duplicate_indexes.yml
@@ -27,9 +27,6 @@ boards_epic_board_recent_visits:
boards_epic_user_preferences:
index_boards_epic_user_preferences_on_board_user_epic_unique:
- index_boards_epic_user_preferences_on_board_id
-ci_build_trace_metadata:
- index_ci_build_trace_metadata_on_trace_artifact_id:
- - index_ci_build_trace_metadata_on_trace_artifact_id_partition_id
ci_job_artifacts:
index_ci_job_artifacts_on_id_project_id_and_created_at:
- index_ci_job_artifacts_on_project_id
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index a11f2cdb41b..872523e8d16 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -5007,11 +5007,8 @@
- './spec/lib/backup/database_backup_error_spec.rb'
- './spec/lib/backup/database_spec.rb'
- './spec/lib/backup/file_backup_error_spec.rb'
-- './spec/lib/backup/files_spec.rb'
- './spec/lib/backup/gitaly_backup_spec.rb'
- './spec/lib/backup/manager_spec.rb'
-- './spec/lib/backup/repositories_spec.rb'
-- './spec/lib/backup/task_spec.rb'
- './spec/lib/banzai/color_parser_spec.rb'
- './spec/lib/banzai/commit_renderer_spec.rb'
- './spec/lib/banzai/cross_project_reference_spec.rb'
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index 42699239d87..fad289bfa52 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -334,19 +334,19 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
db_file_name = File.join(Gitlab.config.backup.path, 'db', 'database.sql.gz')
db_backup_error = Backup::DatabaseBackupError.new(config, db_file_name)
- where(:backup_class, :rake_task, :error) do
- Backup::Database | 'gitlab:backup:db:create' | db_backup_error
- Backup::Files | 'gitlab:backup:builds:create' | file_backup_error
- Backup::Files | 'gitlab:backup:uploads:create' | file_backup_error
- Backup::Files | 'gitlab:backup:artifacts:create' | file_backup_error
- Backup::Files | 'gitlab:backup:pages:create' | file_backup_error
- Backup::Files | 'gitlab:backup:lfs:create' | file_backup_error
- Backup::Files | 'gitlab:backup:registry:create' | file_backup_error
+ where(:backup_target_class, :rake_task, :error) do
+ Backup::Targets::Database | 'gitlab:backup:db:create' | db_backup_error
+ Backup::Targets::Files | 'gitlab:backup:builds:create' | file_backup_error
+ Backup::Targets::Files | 'gitlab:backup:uploads:create' | file_backup_error
+ Backup::Targets::Files | 'gitlab:backup:artifacts:create' | file_backup_error
+ Backup::Targets::Files | 'gitlab:backup:pages:create' | file_backup_error
+ Backup::Targets::Files | 'gitlab:backup:lfs:create' | file_backup_error
+ Backup::Targets::Files | 'gitlab:backup:registry:create' | file_backup_error
end
with_them do
before do
- allow_next_instance_of(backup_class) do |instance|
+ allow_next_instance_of(backup_target_class) do |instance|
allow(instance).to receive(:dump).and_raise(error)
end
end
@@ -564,7 +564,7 @@ RSpec.describe 'gitlab:backup namespace rake tasks', :delete, feature_category:
stub_env('GITLAB_BACKUP_MAX_CONCURRENCY', 5)
stub_env('GITLAB_BACKUP_MAX_STORAGE_CONCURRENCY', 2)
- expect(::Backup::Repositories).to receive(:new)
+ expect(::Backup::Targets::Repositories).to receive(:new)
.with(anything, strategy: anything, options: anything, storages: [], paths: [], skip_paths: [])
.and_call_original
expect(::Backup::GitalyBackup).to receive(:new).with(