gitlab.com/gitlab-org/gitlab-foss.git
-rw-r--r--app/assets/javascripts/alerts_service_settings/components/alerts_service_form.vue4
-rw-r--r--app/assets/javascripts/api.js9
-rw-r--r--app/assets/javascripts/badges/components/badge_list.vue2
-rw-r--r--app/assets/javascripts/ci_variable_list/components/ci_variable_modal.vue16
-rw-r--r--app/assets/javascripts/ci_variable_list/components/ci_variable_table.vue7
-rw-r--r--app/assets/javascripts/contributors/components/contributors.vue2
-rw-r--r--app/assets/javascripts/create_item_dropdown.js8
-rw-r--r--app/assets/javascripts/deploy_keys/components/app.vue2
-rw-r--r--app/assets/javascripts/environments/components/environment_item.vue18
-rw-r--r--app/assets/javascripts/environments/components/environments_table.vue16
-rw-r--r--app/assets/javascripts/error_tracking/components/error_details.vue4
-rw-r--r--app/assets/javascripts/frequent_items/components/app.vue2
-rw-r--r--app/assets/javascripts/ide/components/branches/search_list.vue2
-rw-r--r--app/assets/javascripts/ide/components/file_templates/dropdown.vue2
-rw-r--r--app/assets/javascripts/ide/components/jobs/list.vue2
-rw-r--r--app/assets/javascripts/ide/components/merge_requests/list.vue2
-rw-r--r--app/assets/javascripts/ide/components/pipelines/list.vue2
-rw-r--r--app/assets/javascripts/ide/components/preview/clientside.vue2
-rw-r--r--app/assets/javascripts/ide/components/repo_commit_section.vue2
-rw-r--r--app/assets/javascripts/lib/utils/common_utils.js15
-rw-r--r--app/assets/javascripts/lib/utils/text_utility.js8
-rw-r--r--app/assets/javascripts/logs/components/environment_logs.vue19
-rw-r--r--app/assets/javascripts/logs/components/log_advanced_filters.vue124
-rw-r--r--app/assets/javascripts/logs/components/tokens/token_with_loading_state.vue30
-rw-r--r--app/assets/javascripts/logs/constants.js3
-rw-r--r--app/assets/javascripts/logs/stores/actions.js30
-rw-r--r--app/assets/javascripts/logs/utils.js3
-rw-r--r--app/assets/javascripts/monitoring/components/charts/time_series.vue6
-rw-r--r--app/assets/javascripts/monitoring/components/dashboard.vue13
-rw-r--r--app/assets/javascripts/monitoring/components/panel_type.vue52
-rw-r--r--app/assets/javascripts/monitoring/constants.js5
-rw-r--r--app/assets/javascripts/monitoring/queries/getAnnotations.query.graphql13
-rw-r--r--app/assets/javascripts/monitoring/stores/actions.js53
-rw-r--r--app/assets/javascripts/monitoring/stores/mutation_types.js5
-rw-r--r--app/assets/javascripts/monitoring/stores/mutations.js66
-rw-r--r--app/assets/javascripts/monitoring/stores/state.js1
-rw-r--r--app/assets/javascripts/monitoring/stores/utils.js6
-rw-r--r--app/assets/javascripts/notes/components/diff_with_note.vue3
-rw-r--r--app/assets/javascripts/pages/projects/services/edit/index.js6
-rw-r--r--app/assets/javascripts/pipelines/components/graph/graph_component.vue2
-rw-r--r--app/assets/javascripts/pipelines/components/header_component.vue2
-rw-r--r--app/assets/javascripts/pipelines/components/pipelines.vue2
-rw-r--r--app/assets/javascripts/pipelines/components/test_reports/test_suite_table.vue2
-rw-r--r--app/assets/javascripts/projects/tree/components/commit_pipeline_status_component.vue2
-rw-r--r--app/assets/javascripts/releases/components/app_edit.vue24
-rw-r--r--app/assets/javascripts/releases/components/asset_links_form.vue106
-rw-r--r--app/assets/javascripts/releases/stores/modules/detail/getters.js70
-rw-r--r--app/assets/javascripts/sentry_error_stack_trace/components/sentry_error_stack_trace.vue2
-rw-r--r--app/assets/javascripts/serverless/components/functions.vue4
-rw-r--r--app/assets/javascripts/smart_interval.js27
-rw-r--r--app/assets/javascripts/snippets/components/show.vue33
-rw-r--r--app/assets/javascripts/snippets/components/snippet_blob_edit.vue2
-rw-r--r--app/assets/javascripts/snippets/components/snippet_blob_view.vue40
-rw-r--r--app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql15
-rw-r--r--app/assets/javascripts/snippets/mixins/snippets.js39
-rw-r--r--app/assets/javascripts/snippets/queries/snippet.blob.query.graphql24
-rw-r--r--app/assets/javascripts/static_site_editor/components/saved_changes_message.vue30
-rw-r--r--app/assets/javascripts/static_site_editor/components/static_site_editor.vue4
-rw-r--r--app/assets/javascripts/static_site_editor/constants.js12
-rw-r--r--app/assets/javascripts/static_site_editor/services/generate_branch_name.js8
-rw-r--r--app/assets/javascripts/static_site_editor/services/submit_content_changes.js76
-rw-r--r--app/assets/javascripts/static_site_editor/store/getters.js2
-rw-r--r--app/assets/javascripts/static_site_editor/store/mutations.js1
-rw-r--r--app/assets/javascripts/static_site_editor/store/state.js4
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/components/mr_widget_terraform_plan.vue152
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue21
-rw-r--r--app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js1
-rw-r--r--app/assets/javascripts/vue_shared/components/file_row.vue1
-rw-r--r--app/assets/javascripts/vue_shared/components/form/form_footer_actions.vue7
-rw-r--r--app/assets/javascripts/vue_shared/components/project_selector/project_selector.vue2
-rw-r--r--app/assets/stylesheets/components/dashboard_skeleton.scss2
-rw-r--r--app/assets/stylesheets/framework/buttons.scss4
-rw-r--r--app/assets/stylesheets/framework/filters.scss10
-rw-r--r--app/assets/stylesheets/framework/typography.scss6
-rw-r--r--app/assets/stylesheets/framework/variables.scss32
-rw-r--r--app/assets/stylesheets/pages/boards.scss2
-rw-r--r--app/assets/stylesheets/pages/commits.scss2
-rw-r--r--app/assets/stylesheets/pages/issuable.scss2
-rw-r--r--app/assets/stylesheets/pages/prometheus.scss7
-rw-r--r--app/assets/stylesheets/pages/tree.scss4
-rw-r--r--app/assets/stylesheets/utilities.scss5
-rw-r--r--app/controllers/admin/application_settings_controller.rb1
-rw-r--r--app/controllers/groups/settings/ci_cd_controller.rb2
-rw-r--r--app/controllers/ide_controller.rb4
-rw-r--r--app/controllers/import/github_controller.rb10
-rw-r--r--app/controllers/projects/environments_controller.rb4
-rw-r--r--app/controllers/projects/import/jira_controller.rb6
-rw-r--r--app/controllers/projects/issues_controller.rb23
-rw-r--r--app/controllers/projects/merge_requests_controller.rb5
-rw-r--r--app/controllers/projects/pages_controller.rb2
-rw-r--r--app/controllers/projects/pages_domains_controller.rb8
-rw-r--r--app/controllers/projects/settings/ci_cd_controller.rb2
-rw-r--r--app/controllers/projects/static_site_editor_controller.rb11
-rw-r--r--app/graphql/resolvers/merge_requests_resolver.rb13
-rw-r--r--app/graphql/resolvers/projects/jira_imports_resolver.rb2
-rw-r--r--app/mailers/emails/pages_domains.rb11
-rw-r--r--app/models/application_setting_implementation.rb1
-rw-r--r--app/models/ci/job_artifact.rb15
-rw-r--r--app/models/ci/pipeline.rb1
-rw-r--r--app/models/deploy_token.rb4
-rw-r--r--app/models/diff_note_position.rb36
-rw-r--r--app/models/group.rb10
-rw-r--r--app/models/jira_import_state.rb2
-rw-r--r--app/models/lfs_object.rb13
-rw-r--r--app/models/namespace.rb3
-rw-r--r--app/models/pages_domain.rb8
-rw-r--r--app/models/project.rb10
-rw-r--r--app/models/project_services/chat_message/pipeline_message.rb15
-rw-r--r--app/models/project_services/prometheus_service.rb2
-rw-r--r--app/policies/concerns/crud_policy_helpers.rb (renamed from app/policies/project_policy/class_methods.rb)6
-rw-r--r--app/policies/group_policy.rb34
-rw-r--r--app/policies/issue_policy.rb2
-rw-r--r--app/policies/project_policy.rb2
-rw-r--r--app/services/auth/container_registry_authentication_service.rb9
-rw-r--r--app/services/clusters/create_service.rb7
-rw-r--r--app/services/clusters/management/validate_management_project_permissions_service.rb54
-rw-r--r--app/services/clusters/update_service.rb41
-rw-r--r--app/services/environments/auto_stop_service.rb2
-rw-r--r--app/services/jira_import/start_import_service.rb19
-rw-r--r--app/services/metrics/dashboard/base_service.rb5
-rw-r--r--app/services/metrics/dashboard/system_dashboard_service.rb5
-rw-r--r--app/services/notification_service.rb6
-rw-r--r--app/services/pages_domains/obtain_lets_encrypt_certificate_service.rb2
-rw-r--r--app/services/pages_domains/retry_acme_order_service.rb21
-rw-r--r--app/services/projects/update_repository_storage_service.rb7
-rw-r--r--app/services/prometheus/create_default_alerts_service.rb5
-rw-r--r--app/uploaders/records_uploads.rb23
-rw-r--r--app/views/admin/application_settings/_issue_limits.html.haml9
-rw-r--r--app/views/admin/application_settings/network.html.haml11
-rw-r--r--app/views/admin/deploy_keys/index.html.haml2
-rw-r--r--app/views/layouts/_page.html.haml2
-rw-r--r--app/views/layouts/nav/_dashboard.html.haml2
-rw-r--r--app/views/notify/pages_domain_auto_ssl_failed_email.html.haml11
-rw-r--r--app/views/notify/pages_domain_auto_ssl_failed_email.text.haml7
-rw-r--r--app/views/profiles/emails/index.html.haml2
-rw-r--r--app/views/projects/_flash_messages.html.haml1
-rw-r--r--app/views/projects/issues/_related_branches.html.haml2
-rw-r--r--app/views/projects/issues/show.html.haml3
-rw-r--r--app/views/projects/merge_requests/_awards_block.html.haml3
-rw-r--r--app/views/projects/pages/_list.html.haml5
-rw-r--r--app/views/projects/pages_domains/_certificate.html.haml2
-rw-r--r--app/views/projects/pages_domains/_lets_encrypt_callout.html.haml23
-rw-r--r--app/views/projects/settings/ci_cd/show.html.haml2
-rw-r--r--app/views/projects/static_site_editor/show.html.haml2
-rw-r--r--app/views/shared/deploy_tokens/_form.html.haml5
-rw-r--r--app/workers/concerns/project_import_options.rb7
-rw-r--r--app/workers/environments/auto_stop_cron_worker.rb2
-rw-r--r--app/workers/project_update_repository_storage_worker.rb10
-rw-r--r--changelogs/unreleased/199195-ide-fix-diff-highlighting.yml5
-rw-r--r--changelogs/unreleased/207528-tf-plan-in-mr.yml5
-rw-r--r--changelogs/unreleased/207549-add-refresh-dashboard-button-second-iteration.yml5
-rw-r--r--changelogs/unreleased/207912-integrate-filtered-search-component.yml5
-rw-r--r--changelogs/unreleased/211460-annotations-post-endpoint-revised.yml5
-rw-r--r--changelogs/unreleased/211998-add-cluster-mangement-id-on-create.yml6
-rw-r--r--changelogs/unreleased/212560_initialize_sse_frontend.yml5
-rw-r--r--changelogs/unreleased/212561-fix-empty-edit-area.yml5
-rw-r--r--changelogs/unreleased/212561-saving-changes-rest-service.yml5
-rw-r--r--changelogs/unreleased/213225-adjust-issues-label-on-jira-import.yml5
-rw-r--r--changelogs/unreleased/213299-env-autostop-bug.yml5
-rw-r--r--changelogs/unreleased/213325-elastic-recommendation-alert-appears-when-the-screen-is-loaded.yml5
-rw-r--r--changelogs/unreleased/213382-use-not-valid-to-immediately-enforce-a-not-null-constraint.yml6
-rw-r--r--changelogs/unreleased/213799-optimize-usage_activity_by_stage-projects_with_repositories_enable.yml5
-rw-r--r--changelogs/unreleased/213800-optimize-usage_activity_by_stage-create-protected_branches.yml5
-rw-r--r--changelogs/unreleased/214218-feature-flag-enable-sort_discussions.yml5
-rw-r--r--changelogs/unreleased/22743-deploy-token-write-registry.yml5
-rw-r--r--changelogs/unreleased/34527-fix-graphql-endpoint-for-merge-requests.yml5
-rw-r--r--changelogs/unreleased/37001.yml5
-rw-r--r--changelogs/unreleased/55241-rate-limit-issue-creation.yml5
-rw-r--r--changelogs/unreleased/bvl-remove-sidekiq-deduplication-feature-flag.yml5
-rw-r--r--changelogs/unreleased/filter-pipeline-merge-requests-by-sha.yml5
-rw-r--r--changelogs/unreleased/fix-keyboard-shortcut-nav-to-groups.yml5
-rw-r--r--changelogs/unreleased/github-rate-limit-on-project-import.yml5
-rw-r--r--changelogs/unreleased/patch-97.yml5
-rw-r--r--changelogs/unreleased/ph-210377-increaseMrPollTimes.yml5
-rw-r--r--changelogs/unreleased/sh-improve-dast-template-error.yml5
-rw-r--r--changelogs/unreleased/slack-notification-retry-success-skip.yml5
-rw-r--r--changelogs/unreleased/update-ci-variable-qa-test.yml5
-rw-r--r--changelogs/unreleased/vs-migrate-deprecated-size-in-loading-icon.yml5
-rw-r--r--config/pseudonymizer.yml1
-rw-r--r--config/routes/project.rb5
-rw-r--r--config/routes/repository.rb4
-rw-r--r--db/migrate/20200325111432_add_issues_create_limit_to_application_settings.rb9
-rw-r--r--db/migrate/20200326122700_create_diff_note_positions.rb31
-rw-r--r--db/migrate/20200406102111_add_index_to_deployments_where_cluster_id_is_not_null.rb17
-rw-r--r--db/migrate/20200406165950_add_not_null_constraint_on_file_store_to_lfs_objects.rb24
-rw-r--r--db/migrate/20200406171857_add_not_null_constraint_on_file_store_to_ci_job_artifacts.rb24
-rw-r--r--db/migrate/20200406172135_add_not_null_constraint_on_file_store_to_uploads.rb24
-rw-r--r--db/migrate/20200406192059_add_write_registry_to_deploy_tokens.rb17
-rw-r--r--db/migrate/20200408153842_add_index_on_creator_id_and_id_on_projects.rb17
-rw-r--r--db/migrate/20200408175424_add_index_on_creator_id_created_at_id_to_projects_table.rb17
-rw-r--r--db/post_migrate/20200406102120_backfill_deployment_clusters_from_deployments.rb34
-rw-r--r--db/structure.sql63
-rw-r--r--doc/administration/availability/index.md8
-rw-r--r--doc/administration/geo/replication/version_specific_updates.md15
-rw-r--r--doc/administration/gitaly/praefect.md34
-rw-r--r--doc/administration/high_availability/gitlab.md16
-rw-r--r--doc/administration/high_availability/nfs.md51
-rw-r--r--doc/administration/high_availability/nfs_host_client_setup.md3
-rw-r--r--doc/administration/scaling/index.md28
-rw-r--r--doc/api/deploy_tokens.md4
-rw-r--r--doc/api/graphql/reference/gitlab_schema.graphql91
-rw-r--r--doc/api/graphql/reference/gitlab_schema.json314
-rw-r--r--doc/api/graphql/reference/index.md26
-rw-r--r--doc/api/group_clusters.md1
-rw-r--r--doc/api/merge_requests.md2
-rw-r--r--doc/api/pipelines.md2
-rw-r--r--doc/api/project_clusters.md1
-rw-r--r--doc/api/vulnerability_issue_links.md217
-rw-r--r--doc/ci/docker/using_docker_build.md46
-rw-r--r--doc/ci/merge_request_pipelines/index.md7
-rw-r--r--doc/ci/merge_request_pipelines/pipelines_for_merged_results/index.md46
-rw-r--r--doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/img/merge_train_immediate_merge_confirmation_dialog_v12_6.pngbin23747 -> 0 bytes
-rw-r--r--doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/img/merge_train_immediate_merge_v12_6.pngbin24056 -> 63888 bytes
-rw-r--r--doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md132
-rw-r--r--doc/development/README.md2
-rw-r--r--doc/development/api_graphql_styleguide.md9
-rw-r--r--doc/development/code_review.md64
-rw-r--r--doc/development/documentation/site_architecture/index.md44
-rw-r--r--doc/development/elasticsearch.md13
-rw-r--r--doc/development/integrations/secure.md21
-rw-r--r--doc/development/reusing_abstractions.md2
-rw-r--r--doc/development/secure_coding_guidelines.md306
-rw-r--r--doc/development/testing_guide/end_to_end/best_practices.md119
-rw-r--r--doc/development/testing_guide/end_to_end/feature_flags.md6
-rw-r--r--doc/development/testing_guide/end_to_end/index.md55
-rw-r--r--doc/development/testing_guide/end_to_end/quick_start_guide.md17
-rw-r--r--doc/development/testing_guide/flaky_tests.md7
-rw-r--r--doc/development/testing_guide/review_apps.md66
-rw-r--r--doc/gitlab-basics/create-project.md18
-rw-r--r--doc/integration/elasticsearch.md14
-rw-r--r--doc/subscriptions/index.md6
-rw-r--r--doc/topics/autodevops/stages.md5
-rw-r--r--doc/university/README.md2
-rw-r--r--doc/user/analytics/value_stream_analytics.md8
-rw-r--r--doc/user/application_security/container_scanning/index.md14
-rw-r--r--doc/user/application_security/dast/index.md40
-rw-r--r--doc/user/application_security/dependency_scanning/index.md11
-rw-r--r--doc/user/application_security/sast/index.md6
-rw-r--r--doc/user/clusters/applications.md64
-rw-r--r--doc/user/clusters/img/fluentd_v12_10.pngbin0 -> 26758 bytes
-rw-r--r--doc/user/group/saml_sso/index.md21
-rw-r--r--doc/user/group/saml_sso/scim_setup.md21
-rw-r--r--doc/user/packages/container_registry/index.md26
-rw-r--r--doc/user/profile/account/delete_account.md28
-rw-r--r--doc/user/profile/personal_access_tokens.md19
-rw-r--r--doc/user/project/clusters/img/kubernetes_pod_logs_v12_10.pngbin0 -> 143236 bytes
-rw-r--r--doc/user/project/clusters/img/kubernetes_pod_logs_v12_9.pngbin117938 -> 0 bytes
-rw-r--r--doc/user/project/clusters/img/sidebar_menu_pod_logs_v12_10.pngbin0 -> 14897 bytes
-rw-r--r--doc/user/project/clusters/img/sidebar_menu_pod_logs_v12_5.pngbin13681 -> 0 bytes
-rw-r--r--doc/user/project/clusters/kubernetes_pod_logs.md4
-rw-r--r--doc/user/project/deploy_tokens/img/deploy_tokens.pngbin62979 -> 177352 bytes
-rw-r--r--doc/user/project/deploy_tokens/index.md21
-rw-r--r--doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md4
-rw-r--r--doc/user/project/repository/forking_workflow.md3
-rw-r--r--lib/api/api.rb1
-rw-r--r--lib/api/deploy_tokens.rb5
-rw-r--r--lib/api/entities/metrics/dashboard/annotation.rb19
-rw-r--r--lib/api/entities/project_import_status.rb2
-rw-r--r--lib/api/group_clusters.rb1
-rw-r--r--lib/api/metrics/dashboard/annotations.rb41
-rw-r--r--lib/api/project_clusters.rb1
-rw-r--r--lib/gitlab/application_rate_limiter.rb5
-rw-r--r--lib/gitlab/auth.rb3
-rw-r--r--lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments.rb19
-rw-r--r--lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml1
-rw-r--r--lib/gitlab/database/batch_count.rb11
-rw-r--r--lib/gitlab/database/migration_helpers.rb139
-rw-r--r--lib/gitlab/error_tracking.rb38
-rw-r--r--lib/gitlab/jira_import/labels_importer.rb17
-rw-r--r--lib/gitlab/legacy_github_import/client.rb7
-rw-r--r--lib/gitlab/metrics/dashboard/stages/alerts_inserter.rb41
-rw-r--r--lib/gitlab/path_regex.rb9
-rw-r--r--lib/gitlab/set_cache.rb4
-rw-r--r--lib/gitlab/sidekiq_middleware/duplicate_jobs.rb24
-rw-r--r--lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb2
-rw-r--r--lib/gitlab/static_site_editor/config.rb36
-rw-r--r--locale/gitlab.pot151
-rw-r--r--package.json2
-rw-r--r--qa/qa/page/project/settings/ci_variables.rb78
-rw-r--r--qa/qa/resource/ci_variable.rb3
-rw-r--r--qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb23
-rw-r--r--qa/qa/vendor/jenkins/page/last_job_console.rb7
-rw-r--r--rubocop/cop/performance/ar_count_each.rb45
-rw-r--r--spec/controllers/application_controller_spec.rb2
-rw-r--r--spec/controllers/concerns/enforces_admin_authentication_spec.rb2
-rw-r--r--spec/controllers/groups/settings/ci_cd_controller_spec.rb44
-rw-r--r--spec/controllers/projects/clusters/applications_controller_spec.rb7
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb77
-rw-r--r--spec/controllers/projects/deploy_keys_controller_spec.rb18
-rw-r--r--spec/controllers/projects/import/jira_controller_spec.rb69
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb82
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb18
-rw-r--r--spec/controllers/projects/mirrors_controller_spec.rb22
-rw-r--r--spec/controllers/projects/pages_domains_controller_spec.rb18
-rw-r--r--spec/controllers/projects/pipeline_schedules_controller_spec.rb28
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb19
-rw-r--r--spec/controllers/projects/static_site_editor_controller_spec.rb15
-rw-r--r--spec/controllers/projects_controller_spec.rb10
-rw-r--r--spec/factories/ci/job_artifacts.rb2
-rw-r--r--spec/factories/ci/pipelines.rb2
-rw-r--r--spec/factories/deploy_tokens.rb1
-rw-r--r--spec/factories/diff_note_positions.rb10
-rw-r--r--spec/features/dashboard/projects_spec.rb12
-rw-r--r--spec/features/dashboard/shortcuts_spec.rb4
-rw-r--r--spec/features/ide/user_commits_changes_spec.rb33
-rw-r--r--spec/features/projects/environments/environments_spec.rb14
-rw-r--r--spec/features/projects/pages_lets_encrypt_spec.rb16
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb11
-rw-r--r--spec/frontend/api_spec.js34
-rw-r--r--spec/frontend/ide/components/repo_commit_section_spec.js134
-rw-r--r--spec/frontend/lib/utils/text_utility_spec.js14
-rw-r--r--spec/frontend/logs/components/environment_logs_spec.js15
-rw-r--r--spec/frontend/logs/components/log_advanced_filters_spec.js101
-rw-r--r--spec/frontend/logs/components/tokens/token_with_loading_state_spec.js68
-rw-r--r--spec/frontend/logs/stores/actions_spec.js80
-rw-r--r--spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap2
-rw-r--r--spec/frontend/monitoring/components/charts/time_series_spec.js1
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js123
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js5
-rw-r--r--spec/frontend/monitoring/store/utils_spec.js6
-rw-r--r--spec/frontend/prometheus_metrics/custom_metrics_spec.js96
-rw-r--r--spec/frontend/releases/components/app_edit_spec.js85
-rw-r--r--spec/frontend/releases/components/asset_links_form_spec.js229
-rw-r--r--spec/frontend/releases/stores/modules/detail/getters_spec.js154
-rw-r--r--spec/frontend/smart_interval_spec.js197
-rw-r--r--spec/frontend/snippets/components/snippet_blob_view_spec.js39
-rw-r--r--spec/frontend/static_site_editor/components/static_site_editor_spec.js9
-rw-r--r--spec/frontend/static_site_editor/mock_data.js8
-rw-r--r--spec/frontend/static_site_editor/services/generate_branch_name_spec.js22
-rw-r--r--spec/frontend/static_site_editor/services/submit_content_changes_spec.js131
-rw-r--r--spec/frontend/static_site_editor/store/getters_spec.js12
-rw-r--r--spec/frontend/static_site_editor/store/mutations_spec.js1
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_terraform_plan_spec.js89
-rw-r--r--spec/frontend/vue_mr_widget/mr_widget_options_spec.js19
-rw-r--r--spec/frontend/vue_shared/components/form/__snapshots__/form_footer_actions_spec.js.snap19
-rw-r--r--spec/frontend/vue_shared/components/form/form_footer_actions_spec.js29
-rw-r--r--spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/projects/jira_imports_resolver_spec.rb26
-rw-r--r--spec/javascripts/ide/components/repo_commit_section_spec.js113
-rw-r--r--spec/javascripts/smart_interval_spec.js234
-rw-r--r--spec/lib/gitlab/auth_spec.rb117
-rw-r--r--spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb44
-rw-r--r--spec/lib/gitlab/database/batch_count_spec.rb10
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb329
-rw-r--r--spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb2
-rw-r--r--spec/lib/gitlab/jira_import/labels_importer_spec.rb20
-rw-r--r--spec/lib/gitlab/legacy_github_import/client_spec.rb24
-rw-r--r--spec/lib/gitlab/metrics/dashboard/processor_spec.rb52
-rw-r--r--spec/lib/gitlab/repository_set_cache_spec.rb6
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb20
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/duplicate_jobs_spec.rb31
-rw-r--r--spec/lib/gitlab/static_site_editor/config_spec.rb30
-rw-r--r--spec/mailers/emails/pages_domains_spec.rb31
-rw-r--r--spec/migrations/20200406102120_backfill_deployment_clusters_from_deployments_spec.rb50
-rw-r--r--spec/models/ci/job_artifact_spec.rb21
-rw-r--r--spec/models/ci/pipeline_spec.rb17
-rw-r--r--spec/models/deploy_token_spec.rb4
-rw-r--r--spec/models/diff_note_position_spec.rb22
-rw-r--r--spec/models/pages_domain_spec.rb18
-rw-r--r--spec/models/project_services/chat_message/pipeline_message_spec.rb51
-rw-r--r--spec/models/project_services/prometheus_service_spec.rb2
-rw-r--r--spec/policies/group_policy_spec.rb22
-rw-r--r--spec/policies/project_policy_spec.rb144
-rw-r--r--spec/presenters/ci/pipeline_presenter_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/jira_import/start_spec.rb6
-rw-r--r--spec/requests/api/graphql/project/merge_request/diff_notes_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb37
-rw-r--r--spec/requests/api/group_clusters_spec.rb17
-rw-r--r--spec/requests/api/metrics/dashboard/annotations_spec.rb87
-rw-r--r--spec/requests/api/project_clusters_spec.rb22
-rw-r--r--spec/rubocop/cop/performance/ar_count_each_spec.rb62
-rw-r--r--spec/serializers/build_details_entity_spec.rb2
-rw-r--r--spec/services/application_settings/update_service_spec.rb16
-rw-r--r--spec/services/auth/container_registry_authentication_service_spec.rb10
-rw-r--r--spec/services/clusters/create_service_spec.rb88
-rw-r--r--spec/services/clusters/management/validate_management_project_permissions_service_spec.rb88
-rw-r--r--spec/services/environments/auto_stop_service_spec.rb12
-rw-r--r--spec/services/issues/create_service_spec.rb40
-rw-r--r--spec/services/jira_import/start_import_service_spec.rb28
-rw-r--r--spec/services/notification_service_spec.rb1
-rw-r--r--spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb8
-rw-r--r--spec/services/pages_domains/retry_acme_order_service_spec.rb37
-rw-r--r--spec/services/projects/fork_service_spec.rb2
-rw-r--r--spec/services/projects/update_repository_storage_service_spec.rb15
-rw-r--r--spec/spec_helper.rb8
-rw-r--r--spec/support/helpers/features/web_ide_spec_helpers.rb148
-rw-r--r--spec/support/services/deploy_token_shared_examples.rb2
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb8
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb7
-rw-r--r--spec/support/shared_examples/policies/wiki_policies_shared_examples.rb152
-rw-r--r--spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb11
-rw-r--r--spec/uploaders/records_uploads_spec.rb6
-rw-r--r--spec/views/projects/pages/show.html.haml_spec.rb4
-rw-r--r--spec/views/projects/pages_domains/show.html.haml_spec.rb2
-rw-r--r--spec/workers/concerns/project_import_options_spec.rb11
-rw-r--r--spec/workers/project_update_repository_storage_worker_spec.rb29
-rw-r--r--yarn.lock8
397 files changed, 7482 insertions, 2425 deletions
diff --git a/app/assets/javascripts/alerts_service_settings/components/alerts_service_form.vue b/app/assets/javascripts/alerts_service_settings/components/alerts_service_form.vue
index 1c7a181e3e6..785598142fe 100644
--- a/app/assets/javascripts/alerts_service_settings/components/alerts_service_form.vue
+++ b/app/assets/javascripts/alerts_service_settings/components/alerts_service_form.vue
@@ -6,7 +6,7 @@ import {
GlModal,
GlModalDirective,
} from '@gitlab/ui';
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
import ToggleButton from '~/vue_shared/components/toggle_button.vue';
import axios from '~/lib/utils/axios_utils';
@@ -65,7 +65,7 @@ export default {
'AlertService|%{linkStart}Learn more%{linkEnd} about configuring this endpoint to receive alerts.',
),
{
- linkStart: `<a href="${_.escape(
+ linkStart: `<a href="${esc(
this.learnMoreUrl,
)}" target="_blank" rel="noopener noreferrer">`,
linkEnd: '</a>',
diff --git a/app/assets/javascripts/api.js b/app/assets/javascripts/api.js
index 75f7fe62a7e..6301f6a3910 100644
--- a/app/assets/javascripts/api.js
+++ b/app/assets/javascripts/api.js
@@ -188,6 +188,15 @@ const Api = {
return axios.get(url, { params });
},
+ createProjectMergeRequest(projectPath, options) {
+ const url = Api.buildUrl(Api.projectMergeRequestsPath).replace(
+ ':id',
+ encodeURIComponent(projectPath),
+ );
+
+ return axios.post(url, options);
+ },
+
// Return Merge Request for project
projectMergeRequest(projectPath, mergeRequestId, params = {}) {
const url = Api.buildUrl(Api.projectMergeRequestPath)
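The new createProjectMergeRequest helper posts to the project's merge requests endpoint. A minimal usage sketch, assuming the standard REST parameters for creating a merge request (source_branch, target_branch, title) in the options payload; the branch names and title below are placeholders:

import Api from '~/api';

// Payload fields follow POST /projects/:id/merge_requests.
Api.createProjectMergeRequest('my-group/my-project', {
  source_branch: 'feature-branch',
  target_branch: 'master',
  title: 'Add feature',
}).then(({ data }) => {
  // data describes the newly created merge request (e.g. its iid)
});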
diff --git a/app/assets/javascripts/badges/components/badge_list.vue b/app/assets/javascripts/badges/components/badge_list.vue
index d2767dd6c64..04c2d4a7493 100644
--- a/app/assets/javascripts/badges/components/badge_list.vue
+++ b/app/assets/javascripts/badges/components/badge_list.vue
@@ -28,7 +28,7 @@ export default {
{{ s__('Badges|Your badges') }}
<span v-show="!isLoading" class="badge badge-pill">{{ badges.length }}</span>
</div>
- <gl-loading-icon v-show="isLoading" :size="2" class="card-body" />
+ <gl-loading-icon v-show="isLoading" size="lg" class="card-body" />
<div v-if="hasNoBadges" class="card-body">
<span v-if="isGroupBadge">{{ s__('Badges|This group has no badges') }}</span>
<span v-else>{{ s__('Badges|This project has no badges') }}</span>
diff --git a/app/assets/javascripts/ci_variable_list/components/ci_variable_modal.vue b/app/assets/javascripts/ci_variable_list/components/ci_variable_modal.vue
index 32fe841f16e..316408adfb2 100644
--- a/app/assets/javascripts/ci_variable_list/components/ci_variable_modal.vue
+++ b/app/assets/javascripts/ci_variable_list/components/ci_variable_modal.vue
@@ -65,12 +65,6 @@ export default {
modalActionText() {
return this.variableBeingEdited ? __('Update variable') : __('Add variable');
},
- primaryAction() {
- return {
- text: this.modalActionText,
- attributes: { variant: 'success', disabled: !this.canSubmit },
- };
- },
maskedFeedback() {
return __('This variable can not be masked');
},
@@ -120,6 +114,8 @@ export default {
ref="modal"
:modal-id="$options.modalId"
:title="modalActionText"
+ static
+ lazy
@hidden="resetModalHandler"
>
<form>
@@ -127,7 +123,7 @@ export default {
<gl-form-input
id="ci-variable-key"
v-model="variableData.key"
- data-qa-selector="variable_key"
+ data-qa-selector="ci_variable_key_field"
/>
</gl-form-group>
@@ -142,7 +138,7 @@ export default {
v-model="variableData.secret_value"
rows="3"
max-rows="6"
- data-qa-selector="variable_value"
+ data-qa-selector="ci_variable_value_field"
/>
</gl-form-group>
@@ -189,7 +185,7 @@ export default {
<gl-form-checkbox
ref="masked-ci-variable"
v-model="variableData.masked"
- data-qa-selector="variable_masked"
+ data-qa-selector="ci_variable_masked_checkbox"
>
{{ __('Mask variable') }}
<gl-link href="/help/ci/variables/README#masked-variables">
@@ -218,6 +214,7 @@ export default {
ref="deleteCiVariable"
category="secondary"
variant="danger"
+ data-qa-selector="ci_variable_delete_button"
@click="deleteVarAndClose"
>{{ __('Delete variable') }}</gl-deprecated-button
>
@@ -225,6 +222,7 @@ export default {
ref="updateOrAddVariable"
:disabled="!canSubmit"
variant="success"
+ data-qa-selector="ci_variable_save_button"
@click="updateOrAddVariable"
>{{ modalActionText }}
</gl-deprecated-button>
diff --git a/app/assets/javascripts/ci_variable_list/components/ci_variable_table.vue b/app/assets/javascripts/ci_variable_list/components/ci_variable_table.vue
index b374d950c1f..7eb791f97e4 100644
--- a/app/assets/javascripts/ci_variable_list/components/ci_variable_table.vue
+++ b/app/assets/javascripts/ci_variable_list/components/ci_variable_table.vue
@@ -26,7 +26,6 @@ export default {
{
key: 'value',
label: s__('CiVariables|Value'),
- tdClass: 'qa-ci-variable-input-value',
customStyle: { width: '40%' },
},
{
@@ -89,6 +88,7 @@ export default {
:fields="fields"
:items="variables"
tbody-tr-class="js-ci-variable-row"
+ data-qa-selector="ci_variable_table_content"
sort-by="key"
sort-direction="asc"
stacked="lg"
@@ -150,6 +150,7 @@ export default {
<gl-deprecated-button
ref="edit-ci-variable"
v-gl-modal-directive="$options.modalId"
+ data-qa-selector="edit_ci_variable_button"
@click="editVariable(item)"
>
<gl-icon :size="$options.iconSize" name="pencil" />
@@ -168,7 +169,7 @@ export default {
<gl-deprecated-button
v-if="tableIsNotEmpty"
ref="secret-value-reveal-button"
- data-qa-selector="reveal_ci_variable_value"
+ data-qa-selector="reveal_ci_variable_value_button"
class="append-right-8"
@click="toggleValues(!valuesHidden)"
>{{ valuesButtonText }}</gl-deprecated-button
@@ -176,7 +177,7 @@ export default {
<gl-deprecated-button
ref="add-ci-variable"
v-gl-modal-directive="$options.modalId"
- data-qa-selector="add_ci_variable"
+ data-qa-selector="add_ci_variable_button"
variant="success"
>{{ __('Add Variable') }}</gl-deprecated-button
>
diff --git a/app/assets/javascripts/contributors/components/contributors.vue b/app/assets/javascripts/contributors/components/contributors.vue
index 19516a13d15..3de1b2f0707 100644
--- a/app/assets/javascripts/contributors/components/contributors.vue
+++ b/app/assets/javascripts/contributors/components/contributors.vue
@@ -197,7 +197,7 @@ export default {
<template>
<div>
<div v-if="loading" class="contributors-loader text-center">
- <gl-loading-icon :inline="true" :size="4" />
+ <gl-loading-icon :inline="true" size="xl" />
</div>
<div v-else-if="showChart" class="contributors-charts">
diff --git a/app/assets/javascripts/create_item_dropdown.js b/app/assets/javascripts/create_item_dropdown.js
index 95b890b04c1..523e5592fd0 100644
--- a/app/assets/javascripts/create_item_dropdown.js
+++ b/app/assets/javascripts/create_item_dropdown.js
@@ -1,4 +1,4 @@
-import _ from 'underscore';
+import { escape as esc } from 'lodash';
import '~/gl_dropdown';
export default class CreateItemDropdown {
@@ -37,14 +37,14 @@ export default class CreateItemDropdown {
},
selectable: true,
toggleLabel(selected) {
- return selected && 'id' in selected ? _.escape(selected.title) : this.defaultToggleLabel;
+ return selected && 'id' in selected ? esc(selected.title) : this.defaultToggleLabel;
},
fieldName: this.fieldName,
text(item) {
- return _.escape(item.text);
+ return esc(item.text);
},
id(item) {
- return _.escape(item.id);
+ return esc(item.id);
},
onFilter: this.toggleCreateNewButton.bind(this),
clicked: options => {
diff --git a/app/assets/javascripts/deploy_keys/components/app.vue b/app/assets/javascripts/deploy_keys/components/app.vue
index 048f3a2485c..5505704f430 100644
--- a/app/assets/javascripts/deploy_keys/components/app.vue
+++ b/app/assets/javascripts/deploy_keys/components/app.vue
@@ -119,7 +119,7 @@ export default {
<gl-loading-icon
v-if="isLoading && !hasKeys"
:label="s__('DeployKeys|Loading deploy keys')"
- :size="2"
+ size="lg"
/>
<template v-else-if="hasKeys">
<div class="top-area scrolling-tabs-container inner-page-scroll-tabs">
diff --git a/app/assets/javascripts/environments/components/environment_item.vue b/app/assets/javascripts/environments/components/environment_item.vue
index 305d860a692..335c668474e 100644
--- a/app/assets/javascripts/environments/components/environment_item.vue
+++ b/app/assets/javascripts/environments/components/environment_item.vue
@@ -58,12 +58,6 @@ export default {
required: true,
},
- shouldShowAutoStopDate: {
- type: Boolean,
- required: false,
- default: false,
- },
-
tableData: {
type: Object,
required: true,
@@ -638,12 +632,7 @@ export default {
</span>
</div>
- <div
- v-if="!isFolder && shouldShowAutoStopDate"
- class="table-section"
- :class="tableData.autoStop.spacing"
- role="gridcell"
- >
+ <div v-if="!isFolder" class="table-section" :class="tableData.autoStop.spacing" role="gridcell">
<div role="rowheader" class="table-mobile-header">{{ tableData.autoStop.title }}</div>
<span
v-if="canShowAutoStopDate"
@@ -662,10 +651,7 @@ export default {
role="gridcell"
>
<div class="btn-group table-action-buttons" role="group">
- <pin-component
- v-if="canShowAutoStopDate && shouldShowAutoStopDate"
- :auto-stop-url="autoStopUrl"
- />
+ <pin-component v-if="canShowAutoStopDate" :auto-stop-url="autoStopUrl" />
<external-url-component
v-if="externalURL && canReadEnvironment"
diff --git a/app/assets/javascripts/environments/components/environments_table.vue b/app/assets/javascripts/environments/components/environments_table.vue
index 01a00e03814..89e40faa23e 100644
--- a/app/assets/javascripts/environments/components/environments_table.vue
+++ b/app/assets/javascripts/environments/components/environments_table.vue
@@ -6,7 +6,6 @@ import { GlLoadingIcon } from '@gitlab/ui';
import { flow, reverse, sortBy } from 'lodash/fp';
import environmentTableMixin from 'ee_else_ce/environments/mixins/environments_table_mixin';
import { s__ } from '~/locale';
-import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
import EnvironmentItem from './environment_item.vue';
export default {
@@ -17,7 +16,7 @@ export default {
CanaryDeploymentCallout: () =>
import('ee_component/environments/components/canary_deployment_callout.vue'),
},
- mixins: [environmentTableMixin, glFeatureFlagsMixin()],
+ mixins: [environmentTableMixin],
props: {
environments: {
type: Array,
@@ -43,9 +42,6 @@ export default {
: env,
);
},
- shouldShowAutoStopDate() {
- return this.glFeatures.autoStopEnvironments;
- },
tableData() {
return {
// percent spacing for cols, should add up to 100
@@ -74,7 +70,7 @@ export default {
spacing: 'section-5',
},
actions: {
- spacing: this.shouldShowAutoStopDate ? 'section-25' : 'section-30',
+ spacing: 'section-25',
},
};
},
@@ -131,12 +127,7 @@ export default {
<div class="table-section" :class="tableData.date.spacing" role="columnheader">
{{ tableData.date.title }}
</div>
- <div
- v-if="shouldShowAutoStopDate"
- class="table-section"
- :class="tableData.autoStop.spacing"
- role="columnheader"
- >
+ <div class="table-section" :class="tableData.autoStop.spacing" role="columnheader">
{{ tableData.autoStop.title }}
</div>
</div>
@@ -146,7 +137,6 @@ export default {
:key="`environment-item-${i}`"
:model="model"
:can-read-environment="canReadEnvironment"
- :should-show-auto-stop-date="shouldShowAutoStopDate"
:table-data="tableData"
/>
diff --git a/app/assets/javascripts/error_tracking/components/error_details.vue b/app/assets/javascripts/error_tracking/components/error_details.vue
index a8103c80da0..148edfe3a51 100644
--- a/app/assets/javascripts/error_tracking/components/error_details.vue
+++ b/app/assets/javascripts/error_tracking/components/error_details.vue
@@ -225,7 +225,7 @@ export default {
<template>
<div>
<div v-if="errorLoading" class="py-3">
- <gl-loading-icon :size="3" />
+ <gl-loading-icon size="lg" />
</div>
<div v-else-if="error" class="error-details">
<gl-alert v-if="isAlertVisible" @dismiss="isAlertVisible = false">
@@ -405,7 +405,7 @@ export default {
</ul>
<div v-if="loadingStacktrace" class="py-3">
- <gl-loading-icon :size="3" />
+ <gl-loading-icon size="lg" />
</div>
<template v-else-if="showStacktrace">
diff --git a/app/assets/javascripts/frequent_items/components/app.vue b/app/assets/javascripts/frequent_items/components/app.vue
index 2ffecce0a56..1f1776a5487 100644
--- a/app/assets/javascripts/frequent_items/components/app.vue
+++ b/app/assets/javascripts/frequent_items/components/app.vue
@@ -107,7 +107,7 @@ export default {
<gl-loading-icon
v-if="isLoadingItems"
:label="translations.loadingMessage"
- :size="2"
+ size="lg"
class="loading-animation prepend-top-20"
/>
<div v-if="!isLoadingItems && !hasSearchQuery" class="section-header">
diff --git a/app/assets/javascripts/ide/components/branches/search_list.vue b/app/assets/javascripts/ide/components/branches/search_list.vue
index 76821bcd986..dd2d726d525 100644
--- a/app/assets/javascripts/ide/components/branches/search_list.vue
+++ b/app/assets/javascripts/ide/components/branches/search_list.vue
@@ -72,7 +72,7 @@ export default {
<div class="dropdown-content ide-merge-requests-dropdown-content d-flex">
<gl-loading-icon
v-if="isLoading"
- :size="2"
+ size="lg"
class="mt-3 mb-3 align-self-center ml-auto mr-auto"
/>
<ul v-else class="mb-0 w-100">
diff --git a/app/assets/javascripts/ide/components/file_templates/dropdown.vue b/app/assets/javascripts/ide/components/file_templates/dropdown.vue
index 35e5f9bcf69..d80662f6ae1 100644
--- a/app/assets/javascripts/ide/components/file_templates/dropdown.vue
+++ b/app/assets/javascripts/ide/components/file_templates/dropdown.vue
@@ -88,7 +88,7 @@ export default {
<i aria-hidden="true" class="fa fa-search dropdown-input-search"></i>
</div>
<div class="dropdown-content">
- <gl-loading-icon v-if="showLoading" :size="2" />
+ <gl-loading-icon v-if="showLoading" size="lg" />
<ul v-else>
<li v-for="(item, index) in outputData" :key="index">
<button type="button" @click="clickItem(item)">{{ item.name }}</button>
diff --git a/app/assets/javascripts/ide/components/jobs/list.vue b/app/assets/javascripts/ide/components/jobs/list.vue
index 2cb5050c3f0..b97b7289886 100644
--- a/app/assets/javascripts/ide/components/jobs/list.vue
+++ b/app/assets/javascripts/ide/components/jobs/list.vue
@@ -26,7 +26,7 @@ export default {
<template>
<div>
- <gl-loading-icon v-if="loading && !stages.length" :size="2" class="prepend-top-default" />
+ <gl-loading-icon v-if="loading && !stages.length" size="lg" class="prepend-top-default" />
<template v-else>
<stage
v-for="stage in stages"
diff --git a/app/assets/javascripts/ide/components/merge_requests/list.vue b/app/assets/javascripts/ide/components/merge_requests/list.vue
index 15c08988977..bf2a33be653 100644
--- a/app/assets/javascripts/ide/components/merge_requests/list.vue
+++ b/app/assets/javascripts/ide/components/merge_requests/list.vue
@@ -90,7 +90,7 @@ export default {
<div class="dropdown-content ide-merge-requests-dropdown-content d-flex">
<gl-loading-icon
v-if="isLoading"
- :size="2"
+ size="lg"
class="mt-3 mb-3 align-self-center ml-auto mr-auto"
/>
<template v-else>
diff --git a/app/assets/javascripts/ide/components/pipelines/list.vue b/app/assets/javascripts/ide/components/pipelines/list.vue
index 343b0b6e90c..d3e5add2e83 100644
--- a/app/assets/javascripts/ide/components/pipelines/list.vue
+++ b/app/assets/javascripts/ide/components/pipelines/list.vue
@@ -56,7 +56,7 @@ export default {
<template>
<div class="ide-pipeline">
- <gl-loading-icon v-if="showLoadingIcon" :size="2" class="prepend-top-default" />
+ <gl-loading-icon v-if="showLoadingIcon" size="lg" class="prepend-top-default" />
<template v-else-if="hasLoadedPipeline">
<header v-if="latestPipeline" class="ide-tree-header ide-pipeline-header">
<ci-icon :status="latestPipeline.details.status" :size="24" class="d-flex" />
diff --git a/app/assets/javascripts/ide/components/preview/clientside.vue b/app/assets/javascripts/ide/components/preview/clientside.vue
index 86a773499bc..3852f2fdfa4 100644
--- a/app/assets/javascripts/ide/components/preview/clientside.vue
+++ b/app/assets/javascripts/ide/components/preview/clientside.vue
@@ -176,6 +176,6 @@ export default {
{{ s__('IDE|Get started with Live Preview') }}
</a>
</div>
- <gl-loading-icon v-else :size="2" class="align-self-center mt-auto mb-auto" />
+ <gl-loading-icon v-else size="lg" class="align-self-center mt-auto mb-auto" />
</div>
</template>
diff --git a/app/assets/javascripts/ide/components/repo_commit_section.vue b/app/assets/javascripts/ide/components/repo_commit_section.vue
index b8dca2709c8..2e7e55a61c5 100644
--- a/app/assets/javascripts/ide/components/repo_commit_section.vue
+++ b/app/assets/javascripts/ide/components/repo_commit_section.vue
@@ -45,7 +45,7 @@ export default {
if (this.lastOpenedFile && this.lastOpenedFile.type !== 'tree') {
this.openPendingTab({
file: this.lastOpenedFile,
- keyPrefix: this.lastOpenedFile.changed ? stageKeys.unstaged : stageKeys.staged,
+ keyPrefix: this.lastOpenedFile.staged ? stageKeys.staged : stageKeys.unstaged,
})
.then(changeViewer => {
if (changeViewer) {
diff --git a/app/assets/javascripts/lib/utils/common_utils.js b/app/assets/javascripts/lib/utils/common_utils.js
index 9b0ee40a30a..4a48852159a 100644
--- a/app/assets/javascripts/lib/utils/common_utils.js
+++ b/app/assets/javascripts/lib/utils/common_utils.js
@@ -910,3 +910,18 @@ export const setCookie = (name, value) => Cookies.set(name, value, { expires: 36
export const getCookie = name => Cookies.get(name);
export const removeCookie = name => Cookies.remove(name);
+
+/**
+ * Returns the status of a feature flag.
+ * Currently, there is no way to access feature
+ * flags in Vuex other than directly tapping into
+ * window.gon.
+ *
+ * This should only be used on Vuex. If feature flags
+ * need to be accessed in Vue components consider
+ * using the Vue feature flag mixin.
+ *
+ * @param {String} flag Feature flag
+ * @returns {Boolean} on/off
+ */
+export const isFeatureFlagEnabled = flag => window.gon.features?.[flag];
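As the doc comment above notes, this helper is intended for Vuex code that cannot use the Vue feature-flag mixin. A minimal sketch of a store action using it, with a hypothetical flag name and hypothetical follow-up actions:

import { isFeatureFlagEnabled } from '~/lib/utils/common_utils';

export const fetchData = ({ dispatch }) => {
  // 'some_feature' is a placeholder flag name for illustration
  if (isFeatureFlagEnabled('some_feature')) {
    dispatch('fetchWithNewPath');
  } else {
    dispatch('fetchWithLegacyPath');
  }
};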
diff --git a/app/assets/javascripts/lib/utils/text_utility.js b/app/assets/javascripts/lib/utils/text_utility.js
index f857e618d89..86714471823 100644
--- a/app/assets/javascripts/lib/utils/text_utility.js
+++ b/app/assets/javascripts/lib/utils/text_utility.js
@@ -232,3 +232,11 @@ export const truncateNamespace = (string = '') => {
return namespace;
};
+
+/**
+ * Tests that the input is a String and has at least
+ * one non-whitespace character
+ * @param {String} obj The object to test
+ * @returns {Boolean}
+ */
+export const hasContent = obj => isString(obj) && obj.trim() !== '';
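For reference, hasContent treats non-strings, empty strings, and whitespace-only strings as having no content:

import { hasContent } from '~/lib/utils/text_utility';

hasContent('readme content'); // true
hasContent('   ');            // false
hasContent('');               // false
hasContent(null);             // false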
diff --git a/app/assets/javascripts/logs/components/environment_logs.vue b/app/assets/javascripts/logs/components/environment_logs.vue
index 487b4f30b5b..838652f7210 100644
--- a/app/assets/javascripts/logs/components/environment_logs.vue
+++ b/app/assets/javascripts/logs/components/environment_logs.vue
@@ -70,9 +70,10 @@ export default {
return this.logs.isLoading;
},
shouldShowElasticStackCallout() {
- return (
- !this.isElasticStackCalloutDismissed &&
- (this.environments.isLoading || !this.showAdvancedFilters)
+ return !(
+ this.environments.isLoading ||
+ this.isElasticStackCalloutDismissed ||
+ this.showAdvancedFilters
);
},
},
@@ -88,10 +89,9 @@ export default {
methods: {
...mapActions('environmentLogs', [
'setInitData',
- 'setSearch',
- 'showPodLogs',
'showEnvironment',
'fetchEnvironments',
+ 'fetchLogs',
'fetchMoreLogsPrepend',
'dismissRequestEnvironmentsError',
'dismissInvalidTimeRangeWarning',
@@ -120,7 +120,8 @@ export default {
<div class="environment-logs-viewer d-flex flex-column py-3">
<gl-alert
v-if="shouldShowElasticStackCallout"
- class="mb-3 js-elasticsearch-alert"
+ ref="elasticsearchNotice"
+ class="mb-3"
@dismiss="isElasticStackCalloutDismissed = true"
>
{{
@@ -189,13 +190,13 @@ export default {
<log-advanced-filters
v-if="showAdvancedFilters"
ref="log-advanced-filters"
- class="d-md-flex flex-grow-1"
+ class="d-md-flex flex-grow-1 min-width-0"
:disabled="environments.isLoading"
/>
<log-simple-filters
v-else
ref="log-simple-filters"
- class="d-md-flex flex-grow-1"
+ class="d-md-flex flex-grow-1 min-width-0"
:disabled="environments.isLoading"
/>
@@ -203,7 +204,7 @@ export default {
ref="scrollButtons"
class="flex-grow-0 pr-2 mb-2 controllers"
:scroll-down-button-disabled="scrollDownButtonDisabled"
- @refresh="showPodLogs(pods.current)"
+ @refresh="fetchLogs()"
@scrollDown="scrollDown"
/>
</div>
diff --git a/app/assets/javascripts/logs/components/log_advanced_filters.vue b/app/assets/javascripts/logs/components/log_advanced_filters.vue
index dfbd858bf18..49bb80b3bfd 100644
--- a/app/assets/javascripts/logs/components/log_advanced_filters.vue
+++ b/app/assets/javascripts/logs/components/log_advanced_filters.vue
@@ -1,25 +1,15 @@
<script>
-import { s__ } from '~/locale';
-import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
import { mapActions, mapState } from 'vuex';
-import {
- GlIcon,
- GlDropdown,
- GlDropdownHeader,
- GlDropdownDivider,
- GlDropdownItem,
- GlSearchBoxByClick,
-} from '@gitlab/ui';
+import { GlFilteredSearch } from '@gitlab/ui';
+import { __, s__ } from '~/locale';
+import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
import { timeRanges } from '~/vue_shared/constants';
+import { TOKEN_TYPE_POD_NAME } from '../constants';
+import TokenWithLoadingState from './tokens/token_with_loading_state.vue';
export default {
components: {
- GlIcon,
- GlDropdown,
- GlDropdownHeader,
- GlDropdownDivider,
- GlDropdownItem,
- GlSearchBoxByClick,
+ GlFilteredSearch,
DateTimePicker,
},
props: {
@@ -32,11 +22,10 @@ export default {
data() {
return {
timeRanges,
- searchQuery: '',
};
},
computed: {
- ...mapState('environmentLogs', ['timeRange', 'pods']),
+ ...mapState('environmentLogs', ['timeRange', 'pods', 'logs']),
timeRangeModel: {
get() {
@@ -46,75 +35,56 @@ export default {
this.setTimeRange(val);
},
},
+ /**
+ * Token options.
+ *
+ * Returns null when no pods are present, so suggestions are displayed in the token
+ */
+ podOptions() {
+ if (this.pods.options.length) {
+ return this.pods.options.map(podName => ({ value: podName, title: podName }));
+ }
+ return null;
+ },
- podDropdownText() {
- return this.pods.current || s__('Environments|All pods');
+ tokens() {
+ return [
+ {
+ icon: 'pod',
+ type: TOKEN_TYPE_POD_NAME,
+ title: s__('Environments|Pod name'),
+ token: TokenWithLoadingState,
+ operators: [{ value: '=', description: __('is'), default: 'true' }],
+ unique: true,
+ options: this.podOptions,
+ loading: this.logs.isLoading,
+ noOptionsText: s__('Environments|No pods to display'),
+ },
+ ];
},
},
methods: {
- ...mapActions('environmentLogs', ['setSearch', 'showPodLogs', 'setTimeRange']),
- isCurrentPod(podName) {
- return podName === this.pods.current;
+ ...mapActions('environmentLogs', ['showFilteredLogs', 'setTimeRange']),
+
+ filteredSearchSubmit(filters) {
+ this.showFilteredLogs(filters);
},
},
};
</script>
<template>
<div>
- <gl-dropdown
- ref="podsDropdown"
- :text="podDropdownText"
- :disabled="disabled"
- class="mb-2 gl-h-32 pr-2 d-flex d-md-block flex-grow-0 qa-pods-dropdown"
- >
- <gl-dropdown-header class="text-center">
- {{ s__('Environments|Filter by pod') }}
- </gl-dropdown-header>
-
- <gl-dropdown-item v-if="!pods.options.length" disabled>
- <span ref="noPodsMsg" class="text-muted">
- {{ s__('Environments|No pods to display') }}
- </span>
- </gl-dropdown-item>
-
- <template v-else>
- <gl-dropdown-item ref="allPodsOption" key="all-pods" @click="showPodLogs(null)">
- <div class="d-flex">
- <gl-icon
- :class="{ invisible: pods.current !== null }"
- name="status_success_borderless"
- />
- <div class="flex-grow-1">{{ s__('Environments|All pods') }}</div>
- </div>
- </gl-dropdown-item>
- <gl-dropdown-divider />
- <gl-dropdown-item
- v-for="podName in pods.options"
- :key="podName"
- class="text-nowrap"
- @click="showPodLogs(podName)"
- >
- <div class="d-flex">
- <gl-icon
- :class="{ invisible: !isCurrentPod(podName) }"
- name="status_success_borderless"
- />
- <div class="flex-grow-1">{{ podName }}</div>
- </div>
- </gl-dropdown-item>
- </template>
- </gl-dropdown>
-
- <gl-search-box-by-click
- ref="searchBox"
- v-model.trim="searchQuery"
- :disabled="disabled"
- :placeholder="s__('Environments|Search')"
- class="mb-2 pr-2 flex-grow-1"
- type="search"
- autofocus
- @submit="setSearch(searchQuery)"
- />
+ <div class="mb-2 pr-2 flex-grow-1 min-width-0">
+ <gl-filtered-search
+ :placeholder="__('Search')"
+ :clear-button-title="__('Clear')"
+ :close-button-title="__('Close')"
+ class="gl-h-32"
+ :disabled="disabled || logs.isLoading"
+ :available-tokens="tokens"
+ @submit="filteredSearchSubmit"
+ />
+ </div>
<date-time-picker
ref="dateTimePicker"
diff --git a/app/assets/javascripts/logs/components/tokens/token_with_loading_state.vue b/app/assets/javascripts/logs/components/tokens/token_with_loading_state.vue
new file mode 100644
index 00000000000..f8ce704942b
--- /dev/null
+++ b/app/assets/javascripts/logs/components/tokens/token_with_loading_state.vue
@@ -0,0 +1,30 @@
+<script>
+import { GlFilteredSearchToken, GlLoadingIcon } from '@gitlab/ui';
+
+export default {
+ components: {
+ GlFilteredSearchToken,
+ GlLoadingIcon,
+ },
+ inheritAttrs: false,
+ props: {
+ config: {
+ type: Object,
+ required: true,
+ },
+ },
+};
+</script>
+
+<template>
+ <gl-filtered-search-token :config="config" v-bind="{ ...$attrs }" v-on="$listeners">
+ <template #suggestions>
+ <div class="m-1">
+ <gl-loading-icon v-if="config.loading" />
+ <div v-else class="py-1 px-2 text-muted">
+ {{ config.noOptionsText }}
+ </div>
+ </div>
+ </template>
+ </gl-filtered-search-token>
+</template>
diff --git a/app/assets/javascripts/logs/constants.js b/app/assets/javascripts/logs/constants.js
new file mode 100644
index 00000000000..450b83f4827
--- /dev/null
+++ b/app/assets/javascripts/logs/constants.js
@@ -0,0 +1,3 @@
+export const dateFormatMask = 'UTC:mmm dd HH:MM:ss.l"Z"';
+
+export const TOKEN_TYPE_POD_NAME = 'TOKEN_TYPE_POD_NAME';
diff --git a/app/assets/javascripts/logs/stores/actions.js b/app/assets/javascripts/logs/stores/actions.js
index be847108a49..a86d3c775a9 100644
--- a/app/assets/javascripts/logs/stores/actions.js
+++ b/app/assets/javascripts/logs/stores/actions.js
@@ -2,6 +2,7 @@ import { backOff } from '~/lib/utils/common_utils';
import httpStatusCodes from '~/lib/utils/http_status';
import axios from '~/lib/utils/axios_utils';
import { convertToFixedRange } from '~/lib/utils/datetime_range';
+import { TOKEN_TYPE_POD_NAME } from '../constants';
import * as types from './mutation_types';
@@ -49,19 +50,42 @@ const requestLogsUntilData = ({ commit, state }) => {
return requestUntilData(logs_api_path, params);
};
+/**
+ * Converts filters emitted by the component, e.g. a filterered-search
+ * to parameters to be applied to the filters of the store
+ * @param {Array} filters - List of strings or objects to filter by.
+ * @returns {Object} - An object with `search` and `podName` keys.
+ */
+const filtersToParams = (filters = []) => {
+ // Strings become part of the `search`
+ const search = filters
+ .filter(f => typeof f === 'string')
+ .join(' ')
+ .trim();
+
+ // null podName to show all pods
+ const podName = filters.find(f => f?.type === TOKEN_TYPE_POD_NAME)?.value?.data ?? null;
+
+ return { search, podName };
+};
+
export const setInitData = ({ commit }, { timeRange, environmentName, podName }) => {
commit(types.SET_TIME_RANGE, timeRange);
commit(types.SET_PROJECT_ENVIRONMENT, environmentName);
commit(types.SET_CURRENT_POD_NAME, podName);
};
-export const showPodLogs = ({ dispatch, commit }, podName) => {
+export const showFilteredLogs = ({ dispatch, commit }, filters = []) => {
+ const { podName, search } = filtersToParams(filters);
+
commit(types.SET_CURRENT_POD_NAME, podName);
+ commit(types.SET_SEARCH, search);
+
dispatch('fetchLogs');
};
-export const setSearch = ({ dispatch, commit }, searchQuery) => {
- commit(types.SET_SEARCH, searchQuery);
+export const showPodLogs = ({ dispatch, commit }, podName) => {
+ commit(types.SET_CURRENT_POD_NAME, podName);
dispatch('fetchLogs');
};
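GlFilteredSearch submits a mixed array of plain strings (free-text terms) and token objects, which filtersToParams splits into search and podName. A worked example of that conversion, using placeholder values:

// Filters as emitted by the filtered-search bar on submit:
const filters = [
  'error',                                                   // free text
  'timeout',                                                  // free text
  { type: 'TOKEN_TYPE_POD_NAME', value: { data: 'pod-1' } },  // pod token
];

// filtersToParams(filters) would yield:
// { search: 'error timeout', podName: 'pod-1' }
// With no pod token present, podName falls back to null (all pods).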
diff --git a/app/assets/javascripts/logs/utils.js b/app/assets/javascripts/logs/utils.js
index 30213dbc130..8479eeb3b59 100644
--- a/app/assets/javascripts/logs/utils.js
+++ b/app/assets/javascripts/logs/utils.js
@@ -1,7 +1,6 @@
import { secondsToMilliseconds } from '~/lib/utils/datetime_utility';
import dateFormat from 'dateformat';
-
-const dateFormatMask = 'UTC:mmm dd HH:MM:ss.l"Z"';
+import { dateFormatMask } from './constants';
/**
* Returns a time range (`start`, `end`) where `start` is the
diff --git a/app/assets/javascripts/monitoring/components/charts/time_series.vue b/app/assets/javascripts/monitoring/components/charts/time_series.vue
index 24aa8480ce4..9041b01088c 100644
--- a/app/assets/javascripts/monitoring/components/charts/time_series.vue
+++ b/app/assets/javascripts/monitoring/components/charts/time_series.vue
@@ -55,6 +55,11 @@ export default {
required: false,
default: () => [],
},
+ annotations: {
+ type: Array,
+ required: false,
+ default: () => [],
+ },
projectPath: {
type: String,
required: false,
@@ -143,6 +148,7 @@ export default {
return (this.option.series || []).concat(
generateAnnotationsSeries({
deployments: this.recentDeployments,
+ annotations: this.annotations,
}),
);
},
diff --git a/app/assets/javascripts/monitoring/components/dashboard.vue b/app/assets/javascripts/monitoring/components/dashboard.vue
index 9db29f327da..4586ce70ad6 100644
--- a/app/assets/javascripts/monitoring/components/dashboard.vue
+++ b/app/assets/javascripts/monitoring/components/dashboard.vue
@@ -19,12 +19,7 @@ import PanelType from 'ee_else_ce/monitoring/components/panel_type.vue';
import { s__ } from '~/locale';
import createFlash from '~/flash';
import glFeatureFlagsMixin from '~/vue_shared/mixins/gl_feature_flags_mixin';
-import {
- mergeUrlParams,
- redirectTo,
- refreshCurrentPage,
- updateHistory,
-} from '~/lib/utils/url_utility';
+import { mergeUrlParams, redirectTo, updateHistory } from '~/lib/utils/url_utility';
import invalidUrl from '~/lib/utils/invalid_url';
import Icon from '~/vue_shared/components/icon.vue';
import DateTimePicker from '~/vue_shared/components/date_time_picker/date_time_picker.vue';
@@ -218,7 +213,6 @@ export default {
'dashboard',
'emptyState',
'showEmptyState',
- 'deploymentData',
'useDashboardEndpoint',
'allDashboards',
'additionalPanelTypesEnabled',
@@ -273,6 +267,7 @@ export default {
...mapActions('monitoringDashboard', [
'setTimeRange',
'fetchData',
+ 'fetchDashboardData',
'setGettingStartedEmptyState',
'setInitialState',
'setPanelGroupMetrics',
@@ -360,7 +355,7 @@ export default {
},
refreshDashboard() {
- refreshCurrentPage();
+ this.fetchDashboardData();
},
onTimeRangeZoom({ start, end }) {
@@ -475,7 +470,7 @@ export default {
ref="refreshDashboardBtn"
v-gl-tooltip
variant="default"
- :title="s__('Metrics|Reload this page')"
+ :title="s__('Metrics|Refresh dashboard')"
@click="refreshDashboard"
>
<icon name="retry" />
diff --git a/app/assets/javascripts/monitoring/components/panel_type.vue b/app/assets/javascripts/monitoring/components/panel_type.vue
index 44e38089da8..676fc0cca64 100644
--- a/app/assets/javascripts/monitoring/components/panel_type.vue
+++ b/app/assets/javascripts/monitoring/components/panel_type.vue
@@ -4,6 +4,7 @@ import { pickBy } from 'lodash';
import invalidUrl from '~/lib/utils/invalid_url';
import {
GlResizeObserverDirective,
+ GlLoadingIcon,
GlDropdown,
GlDropdownItem,
GlModal,
@@ -37,6 +38,7 @@ export default {
MonitorStackedColumnChart,
MonitorEmptyChart,
Icon,
+ GlLoadingIcon,
GlTooltip,
GlDropdown,
GlDropdownItem,
@@ -87,6 +89,9 @@ export default {
deploymentData(state) {
return state[this.namespace].deploymentData;
},
+ annotations(state) {
+ return state[this.namespace].annotations;
+ },
projectPath(state) {
return state[this.namespace].projectPath;
},
@@ -104,13 +109,17 @@ export default {
// This method is extended by ee functionality
return false;
},
- graphDataHasMetrics() {
+ graphDataHasResult() {
return (
this.graphData.metrics &&
this.graphData.metrics[0].result &&
this.graphData.metrics[0].result.length > 0
);
},
+ graphDataIsLoading() {
+ const { metrics = [] } = this.graphData;
+ return metrics.some(({ loading }) => loading);
+ },
logsPathWithTimeRange() {
const timeRange = this.zoomedTimeRange || this.timeRange;
@@ -140,7 +149,7 @@ export default {
},
isContextualMenuShown() {
return (
- this.graphDataHasMetrics &&
+ this.graphDataHasResult &&
!this.isPanelType('single-stat') &&
!this.isPanelType('heatmap') &&
!this.isPanelType('column') &&
@@ -193,7 +202,7 @@ export default {
</script>
<template>
<div v-gl-resize-observer="onResize" class="prometheus-graph">
- <div class="prometheus-graph-header">
+ <div class="d-flex align-items-center mr-3">
<h5
ref="graphTitle"
class="prometheus-graph-title gl-font-size-large font-weight-bold text-truncate append-right-8"
@@ -203,23 +212,27 @@ export default {
<gl-tooltip :target="() => $refs.graphTitle" :disabled="!showTitleTooltip">
{{ title }}
</gl-tooltip>
+ <alert-widget
+ v-if="isContextualMenuShown && alertWidgetAvailable"
+ class="mx-1"
+ :modal-id="`alert-modal-${index}`"
+ :alerts-endpoint="alertsEndpoint"
+ :relevant-queries="graphData.metrics"
+ :alerts-to-manage="getGraphAlerts(graphData.metrics)"
+ @setAlerts="setAlerts"
+ />
+ <div class="flex-grow-1"></div>
+ <div v-if="graphDataIsLoading" class="mx-1 mt-1">
+ <gl-loading-icon />
+ </div>
<div
v-if="isContextualMenuShown"
- class="prometheus-graph-widgets js-graph-widgets flex-fill"
+ class="js-graph-widgets"
data-qa-selector="prometheus_graph_widgets"
>
<div class="d-flex align-items-center">
- <alert-widget
- v-if="alertWidgetAvailable"
- :modal-id="`alert-modal-${index}`"
- :alerts-endpoint="alertsEndpoint"
- :relevant-queries="graphData.metrics"
- :alerts-to-manage="getGraphAlerts(graphData.metrics)"
- @setAlerts="setAlerts"
- />
<gl-dropdown
v-gl-tooltip
- class="ml-auto mx-3"
toggle-class="btn btn-transparent border-0"
data-qa-selector="prometheus_widgets_dropdown"
right
@@ -275,31 +288,32 @@ export default {
</div>
<monitor-single-stat-chart
- v-if="isPanelType('single-stat') && graphDataHasMetrics"
+ v-if="isPanelType('single-stat') && graphDataHasResult"
:graph-data="graphData"
/>
<monitor-heatmap-chart
- v-else-if="isPanelType('heatmap') && graphDataHasMetrics"
+ v-else-if="isPanelType('heatmap') && graphDataHasResult"
:graph-data="graphData"
/>
<monitor-bar-chart
- v-else-if="isPanelType('bar') && graphDataHasMetrics"
+ v-else-if="isPanelType('bar') && graphDataHasResult"
:graph-data="graphData"
/>
<monitor-column-chart
- v-else-if="isPanelType('column') && graphDataHasMetrics"
+ v-else-if="isPanelType('column') && graphDataHasResult"
:graph-data="graphData"
/>
<monitor-stacked-column-chart
- v-else-if="isPanelType('stacked-column') && graphDataHasMetrics"
+ v-else-if="isPanelType('stacked-column') && graphDataHasResult"
:graph-data="graphData"
/>
<component
:is="timeChartComponent"
- v-else-if="graphDataHasMetrics"
+ v-else-if="graphDataHasResult"
ref="timeChart"
:graph-data="graphData"
:deployment-data="deploymentData"
+ :annotations="annotations"
:project-path="projectPath"
:thresholds="getGraphAlertValues(graphData.metrics)"
:group-id="groupId"
diff --git a/app/assets/javascripts/monitoring/constants.js b/app/assets/javascripts/monitoring/constants.js
index 6af1d399cfc..8d821c27099 100644
--- a/app/assets/javascripts/monitoring/constants.js
+++ b/app/assets/javascripts/monitoring/constants.js
@@ -10,7 +10,10 @@ export const metricStates = {
OK: 'OK',
/**
- * Metric data is being fetched
+ * Metric data is being fetched for the first time.
+ *
+ * Not used during a data refresh; if data is already available in
+ * the metric, the recommended state is OK.
*/
LOADING: 'LOADING',
diff --git a/app/assets/javascripts/monitoring/queries/getAnnotations.query.graphql b/app/assets/javascripts/monitoring/queries/getAnnotations.query.graphql
new file mode 100644
index 00000000000..e2edaa707b2
--- /dev/null
+++ b/app/assets/javascripts/monitoring/queries/getAnnotations.query.graphql
@@ -0,0 +1,15 @@
+query getAnnotations($projectPath: ID!, $environmentName: String!, $dashboardId: ID!) {
+ project(fullPath: $projectPath) {
+ environment(name: $environmentName) {
+ metricDashboard(id: $dashboardId) {
+ annotations: nodes {
+ id
+ description
+ from
+ to
+ panelId
+ }
+ }
+ }
+ }
+}
diff --git a/app/assets/javascripts/monitoring/stores/actions.js b/app/assets/javascripts/monitoring/stores/actions.js
index 8427a72a68e..5b2bd1f1493 100644
--- a/app/assets/javascripts/monitoring/stores/actions.js
+++ b/app/assets/javascripts/monitoring/stores/actions.js
@@ -6,8 +6,13 @@ import { convertToFixedRange } from '~/lib/utils/datetime_range';
import { gqClient, parseEnvironmentsResponse, removeLeadingSlash } from './utils';
import trackDashboardLoad from '../monitoring_tracking_helper';
import getEnvironments from '../queries/getEnvironments.query.graphql';
+import getAnnotations from '../queries/getAnnotations.query.graphql';
import statusCodes from '../../lib/utils/http_status';
-import { backOff, convertObjectPropsToCamelCase } from '../../lib/utils/common_utils';
+import {
+ backOff,
+ convertObjectPropsToCamelCase,
+ isFeatureFlagEnabled,
+} from '../../lib/utils/common_utils';
import { s__, sprintf } from '../../locale';
import { PROMETHEUS_TIMEOUT, ENVIRONMENT_AVAILABLE_STATE } from '../constants';
@@ -80,6 +85,14 @@ export const setShowErrorBanner = ({ commit }, enabled) => {
export const fetchData = ({ dispatch }) => {
dispatch('fetchEnvironmentsData');
dispatch('fetchDashboard');
+ /**
+ * Annotations data will not be returned until the
+ * BE piece is implemented.
+ * https://gitlab.com/gitlab-org/gitlab/-/issues/211330
+ */
+ if (isFeatureFlagEnabled('metrics_dashboard_annotations')) {
+ dispatch('fetchAnnotations');
+ }
};
// Metrics dashboard
@@ -128,7 +141,7 @@ export const receiveMetricsDashboardSuccess = ({ commit, dispatch }, { response
commit(types.RECEIVE_METRICS_DASHBOARD_SUCCESS, dashboard);
commit(types.SET_ENDPOINTS, convertObjectPropsToCamelCase(metrics_data));
- return dispatch('fetchPrometheusMetrics');
+ return dispatch('fetchDashboardData');
};
export const receiveMetricsDashboardFailure = ({ commit }, error) => {
commit(types.RECEIVE_METRICS_DASHBOARD_FAILURE, error);
@@ -140,7 +153,7 @@ export const receiveMetricsDashboardFailure = ({ commit }, error) => {
* Loads timeseries data: Prometheus data points and deployment data from the project
* @param {Object} Vuex store
*/
-export const fetchPrometheusMetrics = ({ state, dispatch, getters }) => {
+export const fetchDashboardData = ({ state, dispatch, getters }) => {
dispatch('fetchDeploymentsData');
if (!state.timeRange) {
@@ -269,6 +282,40 @@ export const receiveEnvironmentsDataFailure = ({ commit }) => {
commit(types.RECEIVE_ENVIRONMENTS_DATA_FAILURE);
};
+export const fetchAnnotations = ({ state, dispatch }) => {
+ dispatch('requestAnnotations');
+
+ return gqClient
+ .mutate({
+ mutation: getAnnotations,
+ variables: {
+ projectPath: removeLeadingSlash(state.projectPath),
+ dashboardId: state.currentDashboard,
+ environmentName: state.currentEnvironmentName,
+ },
+ })
+ .then(resp => resp.data?.project?.environment?.metricDashboard?.annotations)
+ .then(annotations => {
+ if (!annotations) {
+ createFlash(s__('Metrics|There was an error fetching annotations. Please try again.'));
+ return dispatch('receiveAnnotationsFailure');
+ }
+ dispatch('receiveAnnotationsSuccess', annotations);
+ })
+ .catch(err => {
+ Sentry.captureException(err);
+ dispatch('receiveAnnotationsFailure');
+ createFlash(s__('Metrics|There was an error getting annotations information.'));
+ });
+};
+
+// While this commit does not update the state, it will
+// eventually be useful for showing a loading state
+export const requestAnnotations = ({ commit }) => commit(types.REQUEST_ANNOTATIONS);
+export const receiveAnnotationsSuccess = ({ commit }, data) =>
+ commit(types.RECEIVE_ANNOTATIONS_SUCCESS, data);
+export const receiveAnnotationsFailure = ({ commit }) => commit(types.RECEIVE_ANNOTATIONS_FAILURE);
+
// Dashboard manipulation
/**
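A minimal sketch of the new annotations round trip, assuming a configured Vuex store instance named `store` that registers this module under the `monitoringDashboard` namespace used in dashboard.vue, with the `metrics_dashboard_annotations` feature flag enabled.

// fetchData -> fetchAnnotations -> requestAnnotations (REQUEST_ANNOTATIONS),
// then receiveAnnotationsSuccess or receiveAnnotationsFailure.
store.dispatch('monitoringDashboard/fetchAnnotations').then(() => {
  // RECEIVE_ANNOTATIONS_SUCCESS stores the list on state.annotations;
  // panel_type.vue maps it from state and passes it to time_series.vue.
  const { annotations } = store.state.monitoringDashboard;
  console.log(annotations); // e.g. [{ id, description, from, to, panelId }, ...]
});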
diff --git a/app/assets/javascripts/monitoring/stores/mutation_types.js b/app/assets/javascripts/monitoring/stores/mutation_types.js
index 9a3489d53d7..2f9955da1b1 100644
--- a/app/assets/javascripts/monitoring/stores/mutation_types.js
+++ b/app/assets/javascripts/monitoring/stores/mutation_types.js
@@ -3,6 +3,11 @@ export const REQUEST_METRICS_DASHBOARD = 'REQUEST_METRICS_DASHBOARD';
export const RECEIVE_METRICS_DASHBOARD_SUCCESS = 'RECEIVE_METRICS_DASHBOARD_SUCCESS';
export const RECEIVE_METRICS_DASHBOARD_FAILURE = 'RECEIVE_METRICS_DASHBOARD_FAILURE';
+// Annotations
+export const REQUEST_ANNOTATIONS = 'REQUEST_ANNOTATIONS';
+export const RECEIVE_ANNOTATIONS_SUCCESS = 'RECEIVE_ANNOTATIONS_SUCCESS';
+export const RECEIVE_ANNOTATIONS_FAILURE = 'RECEIVE_ANNOTATIONS_FAILURE';
+
// Git project deployments
export const REQUEST_DEPLOYMENTS_DATA = 'REQUEST_DEPLOYMENTS_DATA';
export const RECEIVE_DEPLOYMENTS_DATA_SUCCESS = 'RECEIVE_DEPLOYMENTS_DATA_SUCCESS';
diff --git a/app/assets/javascripts/monitoring/stores/mutations.js b/app/assets/javascripts/monitoring/stores/mutations.js
index 0a7bb47d533..aa31b6642d7 100644
--- a/app/assets/javascripts/monitoring/stores/mutations.js
+++ b/app/assets/javascripts/monitoring/stores/mutations.js
@@ -1,4 +1,3 @@
-import Vue from 'vue';
import pick from 'lodash/pick';
import * as types from './mutation_types';
import { mapToDashboardViewModel, normalizeQueryResult } from './utils';
@@ -27,24 +26,6 @@ const findMetricInDashboard = (metricId, dashboard) => {
};
/**
- * Set a new state for a metric.
- *
- * Initally metric data is not populated, so `Vue.set` is
- * used to add new properties to the metric.
- *
- * @param {Object} metric - Metric object as defined in the dashboard
- * @param {Object} state - New state
- * @param {Array|null} state.result - Array of results
- * @param {String} state.error - Error code from metricStates
- * @param {Boolean} state.loading - True if the metric is loading
- */
-const setMetricState = (metric, { result = null, loading = false, state = null }) => {
- Vue.set(metric, 'result', result);
- Vue.set(metric, 'loading', loading);
- Vue.set(metric, 'state', state);
-};
-
-/**
 * Maps a backend error state to a `metricStates` constant
* @param {Object} error - Error from backend response
*/
@@ -112,43 +93,46 @@ export default {
},
/**
+ * Annotations
+ */
+ [types.RECEIVE_ANNOTATIONS_SUCCESS](state, annotations) {
+ state.annotations = annotations;
+ },
+ [types.RECEIVE_ANNOTATIONS_FAILURE](state) {
+ state.annotations = [];
+ },
+
+ /**
* Individual panel/metric results
*/
[types.REQUEST_METRIC_RESULT](state, { metricId }) {
const metric = findMetricInDashboard(metricId, state.dashboard);
- setMetricState(metric, {
- loading: true,
- state: metricStates.LOADING,
- });
+ metric.loading = true;
+ if (!metric.result) {
+ metric.state = metricStates.LOADING;
+ }
},
[types.RECEIVE_METRIC_RESULT_SUCCESS](state, { metricId, result }) {
- if (!metricId) {
- return;
- }
-
+ const metric = findMetricInDashboard(metricId, state.dashboard);
+ metric.loading = false;
state.showEmptyState = false;
- const metric = findMetricInDashboard(metricId, state.dashboard);
if (!result || result.length === 0) {
- setMetricState(metric, {
- state: metricStates.NO_DATA,
- });
+ metric.state = metricStates.NO_DATA;
+ metric.result = null;
} else {
const normalizedResults = result.map(normalizeQueryResult);
- setMetricState(metric, {
- result: Object.freeze(normalizedResults),
- state: metricStates.OK,
- });
+
+ metric.state = metricStates.OK;
+ metric.result = Object.freeze(normalizedResults);
}
},
[types.RECEIVE_METRIC_RESULT_FAILURE](state, { metricId, error }) {
- if (!metricId) {
- return;
- }
const metric = findMetricInDashboard(metricId, state.dashboard);
- setMetricState(metric, {
- state: emptyStateFromError(error),
- });
+
+ metric.state = emptyStateFromError(error);
+ metric.loading = false;
+ metric.result = null;
},
[types.SET_INITIAL_STATE](state, initialState = {}) {
Object.assign(state, pick(initialState, initialStateKeys));
diff --git a/app/assets/javascripts/monitoring/stores/state.js b/app/assets/javascripts/monitoring/stores/state.js
index 2b1907e8df7..e60510e747b 100644
--- a/app/assets/javascripts/monitoring/stores/state.js
+++ b/app/assets/javascripts/monitoring/stores/state.js
@@ -20,6 +20,7 @@ export default () => ({
allDashboards: [],
// Other project data
+ annotations: [],
deploymentData: [],
environments: [],
environmentsSearchTerm: '',
diff --git a/app/assets/javascripts/monitoring/stores/utils.js b/app/assets/javascripts/monitoring/stores/utils.js
index d01acdd031b..a212e9be703 100644
--- a/app/assets/javascripts/monitoring/stores/utils.js
+++ b/app/assets/javascripts/monitoring/stores/utils.js
@@ -76,6 +76,12 @@ const mapToMetricsViewModel = metrics =>
queryRange: query_range,
prometheusEndpointPath: prometheus_endpoint_path,
metricId: uniqMetricsId({ metric_id, id }),
+
+ // metric data
+ loading: false,
+ result: null,
+ state: null,
+
...metric,
}));
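With `loading`, `result`, and `state` now pre-declared in mapToMetricsViewModel, the metric mutations further up can assign to those fields directly; Vue.set was only needed when the keys were added after the object became reactive. A small sketch of the difference, using hypothetical objects:

import Vue from 'vue';

// Pre-declared field: plain assignment stays reactive.
const metric = Vue.observable({ loading: false, result: null, state: null });
metric.loading = true; // tracked

// Field missing at observation time: plain assignment is NOT tracked,
// which is what the removed setMetricState helper worked around.
const bare = Vue.observable({});
bare.loading = true; // not reactive
Vue.set(bare, 'loading', true); // reactive, the old approach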
diff --git a/app/assets/javascripts/notes/components/diff_with_note.vue b/app/assets/javascripts/notes/components/diff_with_note.vue
index a58a040fb4e..cd5cfc09ea0 100644
--- a/app/assets/javascripts/notes/components/diff_with_note.vue
+++ b/app/assets/javascripts/notes/components/diff_with_note.vue
@@ -1,5 +1,4 @@
<script>
-/* eslint-disable @gitlab/vue-require-i18n-strings */
import { mapState, mapActions } from 'vuex';
import { GlSkeletonLoading } from '@gitlab/ui';
import DiffFileHeader from '~/diffs/components/diff_file_header.vue';
@@ -96,7 +95,7 @@ export default {
<td class="old_line diff-line-num"></td>
<td class="new_line diff-line-num"></td>
<td v-if="error" class="js-error-lazy-load-diff diff-loading-error-block">
- {{ error }} Unable to load the diff
+ {{ __('Unable to load the diff') }}
<button
class="btn-link btn-link-retry btn-no-padding js-toggle-lazy-diff-retry-button"
@click="fetchDiff"
diff --git a/app/assets/javascripts/pages/projects/services/edit/index.js b/app/assets/javascripts/pages/projects/services/edit/index.js
index 5249709a2a3..04f3877ab48 100644
--- a/app/assets/javascripts/pages/projects/services/edit/index.js
+++ b/app/assets/javascripts/pages/projects/services/edit/index.js
@@ -1,5 +1,5 @@
import IntegrationSettingsForm from '~/integrations/integration_settings_form';
-import PrometheusMetrics from '~/prometheus_metrics/custom_metrics';
+import CustomMetrics from '~/prometheus_metrics/custom_metrics';
import PrometheusAlerts from '~/prometheus_alerts';
import initAlertsSettings from '~/alerts_service_settings';
@@ -10,8 +10,8 @@ document.addEventListener('DOMContentLoaded', () => {
const prometheusSettingsSelector = '.js-prometheus-metrics-monitoring';
const prometheusSettingsWrapper = document.querySelector(prometheusSettingsSelector);
if (prometheusSettingsWrapper) {
- const prometheusMetrics = new PrometheusMetrics(prometheusSettingsSelector);
- prometheusMetrics.init();
+ const customMetrics = new CustomMetrics(prometheusSettingsSelector);
+ customMetrics.init();
}
PrometheusAlerts();
diff --git a/app/assets/javascripts/pipelines/components/graph/graph_component.vue b/app/assets/javascripts/pipelines/components/graph/graph_component.vue
index ef3f4d0e3f6..1ff5b662d18 100644
--- a/app/assets/javascripts/pipelines/components/graph/graph_component.vue
+++ b/app/assets/javascripts/pipelines/components/graph/graph_component.vue
@@ -135,7 +135,7 @@ export default {
paddingRight: `${graphRightPadding}px`,
}"
>
- <gl-loading-icon v-if="isLoading" class="m-auto" :size="3" />
+ <gl-loading-icon v-if="isLoading" class="m-auto" size="lg" />
<pipeline-graph
v-if="pipelineTypeUpstream"
diff --git a/app/assets/javascripts/pipelines/components/header_component.vue b/app/assets/javascripts/pipelines/components/header_component.vue
index 2a3d022c5cd..e7777d0d3af 100644
--- a/app/assets/javascripts/pipelines/components/header_component.vue
+++ b/app/assets/javascripts/pipelines/components/header_component.vue
@@ -108,7 +108,7 @@ export default {
/>
</ci-header>
- <gl-loading-icon v-if="isLoading" :size="2" class="prepend-top-default append-bottom-default" />
+ <gl-loading-icon v-if="isLoading" size="lg" class="prepend-top-default append-bottom-default" />
<gl-modal
:modal-id="$options.DELETE_MODAL_ID"
diff --git a/app/assets/javascripts/pipelines/components/pipelines.vue b/app/assets/javascripts/pipelines/components/pipelines.vue
index accd6bf71f4..d4f23697e09 100644
--- a/app/assets/javascripts/pipelines/components/pipelines.vue
+++ b/app/assets/javascripts/pipelines/components/pipelines.vue
@@ -271,7 +271,7 @@ export default {
<gl-loading-icon
v-if="stateToRender === $options.stateMap.loading"
:label="s__('Pipelines|Loading Pipelines')"
- :size="3"
+ size="lg"
class="prepend-top-20"
/>
diff --git a/app/assets/javascripts/pipelines/components/test_reports/test_suite_table.vue b/app/assets/javascripts/pipelines/components/test_reports/test_suite_table.vue
index 65c1f125b55..be7f27f210d 100644
--- a/app/assets/javascripts/pipelines/components/test_reports/test_suite_table.vue
+++ b/app/assets/javascripts/pipelines/components/test_reports/test_suite_table.vue
@@ -74,7 +74,7 @@ export default {
<div class="table-section section-20 section-wrap">
<div role="rowheader" class="table-mobile-header">{{ __('Name') }}</div>
- <div class="table-mobile-content">{{ testCase.name }}</div>
+ <div class="table-mobile-content pr-md-1 text-truncate">{{ testCase.name }}</div>
</div>
<div class="table-section section-10 section-wrap">
diff --git a/app/assets/javascripts/projects/tree/components/commit_pipeline_status_component.vue b/app/assets/javascripts/projects/tree/components/commit_pipeline_status_component.vue
index f1106dc6ae9..571d305a50c 100644
--- a/app/assets/javascripts/projects/tree/components/commit_pipeline_status_component.vue
+++ b/app/assets/javascripts/projects/tree/components/commit_pipeline_status_component.vue
@@ -94,7 +94,7 @@ export default {
</script>
<template>
<div class="ci-status-link">
- <gl-loading-icon v-if="isLoading" :size="3" label="Loading pipeline status" />
+ <gl-loading-icon v-if="isLoading" size="lg" label="Loading pipeline status" />
<a v-else :href="ciStatus.details_path">
<ci-icon
v-tooltip
diff --git a/app/assets/javascripts/releases/components/app_edit.vue b/app/assets/javascripts/releases/components/app_edit.vue
index 06e388002e4..df356c18417 100644
--- a/app/assets/javascripts/releases/components/app_edit.vue
+++ b/app/assets/javascripts/releases/components/app_edit.vue
@@ -1,6 +1,6 @@
<script>
-import { mapState, mapActions } from 'vuex';
-import { GlDeprecatedButton, GlLink, GlFormInput, GlFormGroup } from '@gitlab/ui';
+import { mapState, mapActions, mapGetters } from 'vuex';
+import { GlNewButton, GlFormInput, GlFormGroup } from '@gitlab/ui';
import { escape as esc } from 'lodash';
import { __, sprintf } from '~/locale';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
@@ -15,8 +15,7 @@ export default {
components: {
GlFormInput,
GlFormGroup,
- GlDeprecatedButton,
- GlLink,
+ GlNewButton,
MarkdownField,
AssetLinksForm,
},
@@ -27,12 +26,14 @@ export default {
computed: {
...mapState('detail', [
'isFetchingRelease',
+ 'isUpdatingRelease',
'fetchError',
'markdownDocsPath',
'markdownPreviewPath',
'releasesPagePath',
'updateReleaseApiDocsPath',
]),
+ ...mapGetters('detail', ['isValid']),
showForm() {
return !this.isFetchingRelease && !this.fetchError;
},
@@ -87,6 +88,9 @@ export default {
showAssetLinksForm() {
return this.glFeatures.releaseAssetLinkEditing;
},
+ isSaveChangesDisabled() {
+ return this.isUpdatingRelease || !this.isValid;
+ },
},
created() {
this.fetchRelease();
@@ -163,17 +167,19 @@ export default {
<asset-links-form v-if="showAssetLinksForm" />
<div class="d-flex pt-3">
- <gl-deprecated-button
- class="mr-auto js-submit-button"
+ <gl-new-button
+ class="mr-auto js-no-auto-disable"
+ category="primary"
variant="success"
type="submit"
:aria-label="__('Save changes')"
+ :disabled="isSaveChangesDisabled"
>
{{ __('Save changes') }}
- </gl-deprecated-button>
- <gl-link :href="cancelPath" class="js-cancel-button btn btn-default">
+ </gl-new-button>
+ <gl-new-button :href="cancelPath" class="js-cancel-button">
{{ __('Cancel') }}
- </gl-link>
+ </gl-new-button>
</div>
</form>
</div>
diff --git a/app/assets/javascripts/releases/components/asset_links_form.vue b/app/assets/javascripts/releases/components/asset_links_form.vue
index 7299fd24ec5..6ca700c2b30 100644
--- a/app/assets/javascripts/releases/components/asset_links_form.vue
+++ b/app/assets/javascripts/releases/components/asset_links_form.vue
@@ -1,10 +1,10 @@
<script>
-import { mapState, mapActions } from 'vuex';
+import { mapState, mapActions, mapGetters } from 'vuex';
import {
GlSprintf,
GlLink,
GlFormGroup,
- GlDeprecatedButton,
+ GlNewButton,
GlIcon,
GlTooltipDirective,
GlFormInput,
@@ -12,13 +12,14 @@ import {
export default {
name: 'AssetLinksForm',
- components: { GlSprintf, GlLink, GlFormGroup, GlDeprecatedButton, GlIcon, GlFormInput },
+ components: { GlSprintf, GlLink, GlFormGroup, GlNewButton, GlIcon, GlFormInput },
directives: { GlTooltip: GlTooltipDirective },
computed: {
...mapState('detail', ['release', 'releaseAssetsDocsPath']),
+ ...mapGetters('detail', ['validationErrors']),
},
created() {
- this.addEmptyAssetLink();
+ this.ensureAtLeastOneLink();
},
methods: {
...mapActions('detail', [
@@ -32,6 +33,7 @@ export default {
},
onRemoveClicked(linkId) {
this.removeAssetLink(linkId);
+ this.ensureAtLeastOneLink();
},
onUrlInput(linkIdToUpdate, newUrl) {
this.updateAssetLinkUrl({ linkIdToUpdate, newUrl });
@@ -39,6 +41,37 @@ export default {
onLinkTitleInput(linkIdToUpdate, newName) {
this.updateAssetLinkName({ linkIdToUpdate, newName });
},
+ hasDuplicateUrl(link) {
+ return Boolean(this.getLinkErrors(link).isDuplicate);
+ },
+ hasBadFormat(link) {
+ return Boolean(this.getLinkErrors(link).isBadFormat);
+ },
+ hasEmptyUrl(link) {
+ return Boolean(this.getLinkErrors(link).isUrlEmpty);
+ },
+ hasEmptyName(link) {
+ return Boolean(this.getLinkErrors(link).isNameEmpty);
+ },
+ getLinkErrors(link) {
+ return this.validationErrors.assets.links[link.id] || {};
+ },
+ isUrlValid(link) {
+ return !this.hasDuplicateUrl(link) && !this.hasBadFormat(link) && !this.hasEmptyUrl(link);
+ },
+ isNameValid(link) {
+ return !this.hasEmptyName(link);
+ },
+
+ /**
+ * Make sure the form is never completely empty by adding an
+ * empty row if the form contains 0 links
+ */
+ ensureAtLeastOneLink() {
+ if (this.release.assets.links.length === 0) {
+ this.addEmptyAssetLink();
+ }
+ },
},
};
</script>
@@ -69,60 +102,93 @@ export default {
<p>
{{
__(
- 'Point to any links you like: documentation, built binaries, or other related materials. These can be internal or external links from your GitLab instance.',
+ 'Point to any links you like: documentation, built binaries, or other related materials. These can be internal or external links from your GitLab instance. Duplicate URLs are not allowed.',
)
}}
</p>
<div
v-for="(link, index) in release.assets.links"
:key="link.id"
- class="d-flex flex-column flex-sm-row align-items-stretch align-items-sm-end"
+ class="row flex-column flex-sm-row align-items-stretch align-items-sm-start"
>
<gl-form-group
- class="url-field form-group flex-grow-1 mr-sm-4"
+ class="url-field form-group col"
:label="__('URL')"
:label-for="`asset-url-${index}`"
>
<gl-form-input
:id="`asset-url-${index}`"
+ ref="urlInput"
:value="link.url"
type="text"
class="form-control"
+ :state="isUrlValid(link)"
@change="onUrlInput(link.id, $event)"
/>
+ <template #invalid-feedback>
+ <span v-if="hasEmptyUrl(link)" class="invalid-feedback d-inline">
+ {{ __('URL is required') }}
+ </span>
+ <span v-else-if="hasBadFormat(link)" class="invalid-feedback d-inline">
+ <gl-sprintf
+ :message="
+ __(
+ 'URL must start with %{codeStart}http://%{codeEnd}, %{codeStart}https://%{codeEnd}, or %{codeStart}ftp://%{codeEnd}',
+ )
+ "
+ >
+ <template #code="{ content }">
+ <code>{{ content }}</code>
+ </template>
+ </gl-sprintf>
+ </span>
+ <span v-else-if="hasDuplicateUrl(link)" class="invalid-feedback d-inline">
+ {{ __('This URL is already used for another link; duplicate URLs are not allowed') }}
+ </span>
+ </template>
</gl-form-group>
<gl-form-group
- class="link-title-field flex-grow-1 mr-sm-4"
+ class="link-title-field col"
:label="__('Link title')"
:label-for="`asset-link-name-${index}`"
>
<gl-form-input
:id="`asset-link-name-${index}`"
+ ref="nameInput"
:value="link.name"
type="text"
class="form-control"
+ :state="isNameValid(link)"
@change="onLinkTitleInput(link.id, $event)"
/>
+ <template v-slot:invalid-feedback>
+ <span v-if="hasEmptyName(link)" class="invalid-feedback d-inline">
+ {{ __('Link title is required') }}
+ </span>
+ </template>
</gl-form-group>
- <gl-deprecated-button
- v-gl-tooltip
- class="mb-5 mb-sm-3 flex-grow-0 flex-shrink-0 remove-button"
- :aria-label="__('Remove asset link')"
- :title="__('Remove asset link')"
- @click="onRemoveClicked(link.id)"
- >
- <gl-icon class="m-0" name="remove" />
- <span class="d-inline d-sm-none">{{ __('Remove asset link') }}</span>
- </gl-deprecated-button>
+ <div class="mb-5 mb-sm-3 mt-sm-4 col col-sm-auto">
+ <gl-new-button
+ v-gl-tooltip
+ class="remove-button w-100"
+ :aria-label="__('Remove asset link')"
+ :title="__('Remove asset link')"
+ @click="onRemoveClicked(link.id)"
+ >
+ <gl-icon class="mr-1 mr-sm-0 mb-1" :size="16" name="remove" />
+ <span class="d-inline d-sm-none">{{ __('Remove asset link') }}</span>
+ </gl-new-button>
+ </div>
</div>
- <gl-deprecated-button
+ <gl-new-button
+ ref="addAnotherLinkButton"
variant="link"
class="align-self-end mb-5 mb-sm-0"
@click="onAddAnotherClicked"
>
{{ __('Add another link') }}
- </gl-deprecated-button>
+ </gl-new-button>
</div>
</template>
diff --git a/app/assets/javascripts/releases/stores/modules/detail/getters.js b/app/assets/javascripts/releases/stores/modules/detail/getters.js
index 562284dc48d..84dc2fca4be 100644
--- a/app/assets/javascripts/releases/stores/modules/detail/getters.js
+++ b/app/assets/javascripts/releases/stores/modules/detail/getters.js
@@ -1,9 +1,13 @@
+import { isEmpty } from 'lodash';
+import { hasContent } from '~/lib/utils/text_utility';
+
/**
+ * @param {Object} link The link to test
* @returns {Boolean} `true` if the release link is empty, i.e. it has
* empty (or whitespace-only) values for both `url` and `name`.
* Otherwise, `false`.
*/
-const isEmptyReleaseLink = l => !/\S/.test(l.url) && !/\S/.test(l.name);
+const isEmptyReleaseLink = link => !hasContent(link.url) && !hasContent(link.name);
/** Returns all release links that aren't empty */
export const releaseLinksToCreate = state => {
@@ -22,3 +26,67 @@ export const releaseLinksToDelete = state => {
return state.originalRelease.assets.links;
};
+
+/** Returns all validation errors on the release object */
+export const validationErrors = state => {
+ const errors = {
+ assets: {
+ links: {},
+ },
+ };
+
+ if (!state.release) {
+ return errors;
+ }
+
+ // Each key of this Map is a normalized URL, and the value is an
+ // array of release link objects that share that URL.
+ // This is used for detecting duplicate URLs.
+ const urlToLinksMap = new Map();
+
+ state.release.assets.links.forEach(link => {
+ errors.assets.links[link.id] = {};
+
+ // Skip rows that are completely empty (no URL and no name)
+ if (isEmptyReleaseLink(link)) {
+ return;
+ }
+
+ if (!hasContent(link.url)) {
+ errors.assets.links[link.id].isUrlEmpty = true;
+ }
+
+ if (!hasContent(link.name)) {
+ errors.assets.links[link.id].isNameEmpty = true;
+ }
+
+ const normalizedUrl = link.url.trim().toLowerCase();
+
+ // Look the normalized URL up in urlToLinksMap and flag every link that shares it
+ if (urlToLinksMap.has(normalizedUrl)) {
+ // a duplicate URL was found!
+
+ // add a validation error for each link that shares this URL
+ const duplicates = urlToLinksMap.get(normalizedUrl);
+ duplicates.push(link);
+ duplicates.forEach(duplicateLink => {
+ errors.assets.links[duplicateLink.id].isDuplicate = true;
+ });
+ } else {
+ // no duplicate URL was found
+
+ urlToLinksMap.set(normalizedUrl, [link]);
+ }
+
+ if (!/^(http|https|ftp):\/\//.test(normalizedUrl)) {
+ errors.assets.links[link.id].isBadFormat = true;
+ }
+ });
+
+ return errors;
+};
+
+/** Returns whether or not the release object is valid */
+export const isValid = (_state, getters) => {
+ return Object.values(getters.validationErrors.assets.links).every(isEmpty);
+};
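A sketch of what the new getters report for a hand-built state; only the fields the getters read are included, and the sample data is illustrative.

import { validationErrors, isValid } from '~/releases/stores/modules/detail/getters';

const state = {
  release: {
    assets: {
      links: [
        { id: 1, url: 'https://example.com/a.zip', name: 'A' },
        { id: 2, url: ' HTTPS://example.com/a.zip', name: 'Also A' }, // same URL once trimmed/lowercased
        { id: 3, url: '', name: '' }, // completely empty rows are skipped
      ],
    },
  },
};

const errors = validationErrors(state);
// errors.assets.links[1] -> { isDuplicate: true }
// errors.assets.links[2] -> { isDuplicate: true }
// errors.assets.links[3] -> {}

isValid(state, { validationErrors: errors }); // false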
diff --git a/app/assets/javascripts/sentry_error_stack_trace/components/sentry_error_stack_trace.vue b/app/assets/javascripts/sentry_error_stack_trace/components/sentry_error_stack_trace.vue
index c90478db620..807f10bd9c6 100644
--- a/app/assets/javascripts/sentry_error_stack_trace/components/sentry_error_stack_trace.vue
+++ b/app/assets/javascripts/sentry_error_stack_trace/components/sentry_error_stack_trace.vue
@@ -36,7 +36,7 @@ export default {
</div>
</div>
<div v-if="loadingStacktrace" class="card">
- <gl-loading-icon class="py-2" label="Fetching stack trace" :size="1" />
+ <gl-loading-icon class="py-2" label="Fetching stack trace" size="sm" />
</div>
<stacktrace v-else :entries="stacktrace" />
</div>
diff --git a/app/assets/javascripts/serverless/components/functions.vue b/app/assets/javascripts/serverless/components/functions.vue
index e06149f2bcb..2b1291ac70f 100644
--- a/app/assets/javascripts/serverless/components/functions.vue
+++ b/app/assets/javascripts/serverless/components/functions.vue
@@ -77,7 +77,7 @@ export default {
<section id="serverless-functions" class="flex-grow">
<gl-loading-icon
v-if="checkingInstalled"
- :size="2"
+ size="lg"
class="prepend-top-default append-bottom-default"
/>
@@ -97,7 +97,7 @@ export default {
</template>
<gl-loading-icon
v-if="isLoading"
- :size="2"
+ size="lg"
class="prepend-top-default append-bottom-default js-functions-loader"
/>
</div>
diff --git a/app/assets/javascripts/smart_interval.js b/app/assets/javascripts/smart_interval.js
index 8ca590123ae..0e52d2d8010 100644
--- a/app/assets/javascripts/smart_interval.js
+++ b/app/assets/javascripts/smart_interval.js
@@ -33,7 +33,7 @@ export default class SmartInterval {
this.state = {
intervalId: null,
currentInterval: this.cfg.startingInterval,
- pageVisibility: 'visible',
+ pageVisible: true,
};
this.initInterval();
@@ -91,8 +91,10 @@ export default class SmartInterval {
}
destroy() {
+ document.removeEventListener('visibilitychange', this.onVisibilityChange);
+ window.removeEventListener('blur', this.onWindowVisibilityChange);
+ window.removeEventListener('focus', this.onWindowVisibilityChange);
this.cancel();
- document.removeEventListener('visibilitychange', this.handleVisibilityChange);
$(document)
.off('visibilitychange')
.off('beforeunload');
@@ -124,9 +126,24 @@
});
}
+ onWindowVisibilityChange(e) {
+ this.state.pageVisible = e.type === 'focus';
+ this.handleVisibilityChange();
+ }
+
+ onVisibilityChange(e) {
+ this.state.pageVisible = e.target.visibilityState === 'visible';
+ this.handleVisibilityChange();
+ }
+
initVisibilityChangeHandling() {
- // cancel interval when tab no longer shown (prevents cached pages from polling)
- document.addEventListener('visibilitychange', this.handleVisibilityChange.bind(this));
+ // cancel interval when tab or window is no longer shown (prevents cached pages from polling);
+ // the handlers are pre-bound so destroy() can remove these exact listeners
+ this.onVisibilityChange = this.onVisibilityChange.bind(this);
+ this.onWindowVisibilityChange = this.onWindowVisibilityChange.bind(this);
+ document.addEventListener('visibilitychange', this.onVisibilityChange);
+ window.addEventListener('blur', this.onWindowVisibilityChange);
+ window.addEventListener('focus', this.onWindowVisibilityChange);
}
initPageUnloadHandling() {
@@ -135,8 +149,7 @@ export default class SmartInterval {
$(document).on('beforeunload', () => this.cancel());
}
- handleVisibilityChange(e) {
- this.state.pageVisibility = e.target.visibilityState;
+ handleVisibilityChange() {
const intervalAction = this.isPageVisible()
? this.onVisibilityVisible
: this.onVisibilityHidden;
@@ -166,7 +179,7 @@ export default class SmartInterval {
}
isPageVisible() {
- return this.state.pageVisibility === 'visible';
+ return this.state.pageVisible;
}
stopTimer() {
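A usage sketch with the option names that mr_widget_options.vue passes further down in this diff; with the new handlers, polling also pauses while the window is blurred, not only when the tab is hidden. The callback is a placeholder.

import SmartInterval from '~/smart_interval';

const poller = new SmartInterval({
  callback: () => fetchStatus(), // fetchStatus is a placeholder
  startingInterval: 10 * 1000,
  maxInterval: 240 * 1000,
  incrementByFactorOf: 2,
});

// Cancels the timer and removes the visibilitychange/blur/focus listeners
// registered in initVisibilityChangeHandling().
poller.destroy();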
diff --git a/app/assets/javascripts/snippets/components/show.vue b/app/assets/javascripts/snippets/components/show.vue
index e98f56d87f5..bc0034d397e 100644
--- a/app/assets/javascripts/snippets/components/show.vue
+++ b/app/assets/javascripts/snippets/components/show.vue
@@ -1,10 +1,11 @@
<script>
-import GetSnippetQuery from '../queries/snippet.query.graphql';
import SnippetHeader from './snippet_header.vue';
import SnippetTitle from './snippet_title.vue';
import SnippetBlob from './snippet_blob_view.vue';
import { GlLoadingIcon } from '@gitlab/ui';
+import { getSnippetMixin } from '../mixins/snippets';
+
export default {
components: {
SnippetHeader,
@@ -12,33 +13,7 @@ export default {
GlLoadingIcon,
SnippetBlob,
},
- apollo: {
- snippet: {
- query: GetSnippetQuery,
- variables() {
- return {
- ids: this.snippetGid,
- };
- },
- update: data => data.snippets.edges[0].node,
- },
- },
- props: {
- snippetGid: {
- type: String,
- required: true,
- },
- },
- data() {
- return {
- snippet: {},
- };
- },
- computed: {
- isLoading() {
- return this.$apollo.queries.snippet.loading;
- },
- },
+ mixins: [getSnippetMixin],
};
</script>
<template>
@@ -46,7 +21,7 @@ export default {
<gl-loading-icon
v-if="isLoading"
:label="__('Loading snippet')"
- :size="2"
+ size="lg"
class="loading-animation prepend-top-20 append-bottom-20"
/>
<template v-else>
diff --git a/app/assets/javascripts/snippets/components/snippet_blob_edit.vue b/app/assets/javascripts/snippets/components/snippet_blob_edit.vue
index ae6f451df18..44b4607e5a9 100644
--- a/app/assets/javascripts/snippets/components/snippet_blob_edit.vue
+++ b/app/assets/javascripts/snippets/components/snippet_blob_edit.vue
@@ -37,7 +37,7 @@ export default {
<gl-loading-icon
v-if="isLoading"
:label="__('Loading snippet')"
- :size="2"
+ size="lg"
class="loading-animation prepend-top-20 append-bottom-20"
/>
<blob-content-edit
diff --git a/app/assets/javascripts/snippets/components/snippet_blob_view.vue b/app/assets/javascripts/snippets/components/snippet_blob_view.vue
index 3e3dcab70c0..02a0fc7686d 100644
--- a/app/assets/javascripts/snippets/components/snippet_blob_view.vue
+++ b/app/assets/javascripts/snippets/components/snippet_blob_view.vue
@@ -3,10 +3,8 @@ import BlobEmbeddable from '~/blob/components/blob_embeddable.vue';
import { SNIPPET_VISIBILITY_PUBLIC } from '../constants';
import BlobHeader from '~/blob/components/blob_header.vue';
import BlobContent from '~/blob/components/blob_content.vue';
-import { GlLoadingIcon } from '@gitlab/ui';
import CloneDropdownButton from '~/vue_shared/components/clone_dropdown.vue';
-import GetSnippetBlobQuery from '../queries/snippet.blob.query.graphql';
import GetBlobContent from '../queries/snippet.blob.content.query.graphql';
import { SIMPLE_BLOB_VIEWER, RICH_BLOB_VIEWER } from '~/blob/components/constants';
@@ -16,25 +14,9 @@ export default {
BlobEmbeddable,
BlobHeader,
BlobContent,
- GlLoadingIcon,
CloneDropdownButton,
},
apollo: {
- blob: {
- query: GetSnippetBlobQuery,
- variables() {
- return {
- ids: this.snippet.id,
- };
- },
- update: data => data.snippets.edges[0].node.blob,
- result(res) {
- const viewer = res.data.snippets.edges[0].node.blob.richViewer
- ? RICH_BLOB_VIEWER
- : SIMPLE_BLOB_VIEWER;
- this.switchViewer(viewer, true);
- },
- },
blobContent: {
query: GetBlobContent,
variables() {
@@ -55,18 +37,18 @@ export default {
},
data() {
return {
- blob: {},
+ blob: this.snippet.blob,
blobContent: '',
- activeViewerType: window.location.hash ? SIMPLE_BLOB_VIEWER : '',
+ activeViewerType:
+ this.snippet.blob?.richViewer && !window.location.hash
+ ? RICH_BLOB_VIEWER
+ : SIMPLE_BLOB_VIEWER,
};
},
computed: {
embeddable() {
return this.snippet.visibilityLevel === SNIPPET_VISIBILITY_PUBLIC;
},
- isBlobLoading() {
- return this.$apollo.queries.blob.loading;
- },
isContentLoading() {
return this.$apollo.queries.blobContent.loading;
},
@@ -79,8 +61,8 @@ export default {
},
},
methods: {
- switchViewer(newViewer, respectHash = false) {
- this.activeViewerType = respectHash && window.location.hash ? SIMPLE_BLOB_VIEWER : newViewer;
+ switchViewer(newViewer) {
+ this.activeViewerType = newViewer;
},
},
};
@@ -88,13 +70,7 @@ export default {
<template>
<div>
<blob-embeddable v-if="embeddable" class="mb-3" :url="snippet.webUrl" />
- <gl-loading-icon
- v-if="isBlobLoading"
- :label="__('Loading blob')"
- size="lg"
- class="prepend-top-20 append-bottom-20"
- />
- <article v-else class="file-holder snippet-file-content">
+ <article class="file-holder snippet-file-content">
<blob-header :blob="blob" :active-viewer-type="viewer.type" @viewer-changed="switchViewer">
<template #actions>
<clone-dropdown-button
diff --git a/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql b/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql
index 22aab7c7795..d793d0b6bb4 100644
--- a/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql
+++ b/app/assets/javascripts/snippets/fragments/snippetBase.fragment.graphql
@@ -1,3 +1,5 @@
+#import '~/graphql_shared/fragments/blobviewer.fragment.graphql'
+
fragment SnippetBase on Snippet {
id
title
@@ -9,6 +11,19 @@ fragment SnippetBase on Snippet {
webUrl
httpUrlToRepo
sshUrlToRepo
+ blob {
+ binary
+ name
+ path
+ rawPath
+ size
+ simpleViewer {
+ ...BlobViewer
+ }
+ richViewer {
+ ...BlobViewer
+ }
+ }
userPermissions {
adminSnippet
updateSnippet
diff --git a/app/assets/javascripts/snippets/mixins/snippets.js b/app/assets/javascripts/snippets/mixins/snippets.js
new file mode 100644
index 00000000000..837c41cdf6b
--- /dev/null
+++ b/app/assets/javascripts/snippets/mixins/snippets.js
@@ -0,0 +1,39 @@
+import GetSnippetQuery from '../queries/snippet.query.graphql';
+
+export const getSnippetMixin = {
+ apollo: {
+ snippet: {
+ query: GetSnippetQuery,
+ variables() {
+ return {
+ ids: this.snippetGid,
+ };
+ },
+ update: data => data.snippets.edges[0]?.node,
+ result(res) {
+ if (this.onSnippetFetch) {
+ this.onSnippetFetch(res);
+ }
+ },
+ },
+ },
+ props: {
+ snippetGid: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ snippet: {},
+ newSnippet: false,
+ };
+ },
+ computed: {
+ isLoading() {
+ return this.$apollo.queries.snippet.loading;
+ },
+ },
+};
+
+export default () => {};
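A sketch of a component consuming the new mixin; the optional onSnippetFetch hook is invoked from the mixin's Apollo result handler whenever the component defines it. The emitted event name is illustrative.

import { getSnippetMixin } from '~/snippets/mixins/snippets';

export default {
  mixins: [getSnippetMixin], // provides the `snippetGid` prop, `snippet` data and `isLoading`
  methods: {
    onSnippetFetch(res) {
      // runs after the GetSnippetQuery result arrives
      const snippet = res.data.snippets.edges[0]?.node;
      if (snippet) this.$emit('snippet-loaded', snippet);
    },
  },
};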
diff --git a/app/assets/javascripts/snippets/queries/snippet.blob.query.graphql b/app/assets/javascripts/snippets/queries/snippet.blob.query.graphql
deleted file mode 100644
index 785c88c185a..00000000000
--- a/app/assets/javascripts/snippets/queries/snippet.blob.query.graphql
+++ /dev/null
@@ -1,24 +0,0 @@
-#import '~/graphql_shared/fragments/blobviewer.fragment.graphql'
-
-query SnippetBlobFull($ids: [ID!]) {
- snippets(ids: $ids) {
- edges {
- node {
- id
- blob {
- binary
- name
- path
- rawPath
- size
- simpleViewer {
- ...BlobViewer
- }
- richViewer {
- ...BlobViewer
- }
- }
- }
- }
- }
-}
diff --git a/app/assets/javascripts/static_site_editor/components/saved_changes_message.vue b/app/assets/javascripts/static_site_editor/components/saved_changes_message.vue
index adcacf8a1b0..d76c6d9d681 100644
--- a/app/assets/javascripts/static_site_editor/components/saved_changes_message.vue
+++ b/app/assets/javascripts/static_site_editor/components/saved_changes_message.vue
@@ -36,7 +36,7 @@ export default {
<template>
<div>
- <div>
+ <div class="border-bottom pb-4">
<h3>{{ s__('StaticSiteEditor|Success!') }}</h3>
<p>
{{
@@ -45,35 +45,37 @@ export default {
)
}}
</p>
- <div>
+ <div class="d-flex justify-content-end">
<gl-new-button ref="returnToSiteButton" :href="returnUrl">{{
s__('StaticSiteEditor|Return to site')
}}</gl-new-button>
- <gl-new-button ref="mergeRequestButton" :href="mergeRequest.url" variant="info">{{
- s__('StaticSiteEditor|View merge request')
- }}</gl-new-button>
+ <gl-new-button
+ ref="mergeRequestButton"
+ class="ml-2"
+ :href="mergeRequest.url"
+ variant="success"
+ >{{ s__('StaticSiteEditor|View merge request') }}</gl-new-button
+ >
</div>
</div>
- <hr />
-
- <div>
+ <div class="pt-2">
<h4>{{ s__('StaticSiteEditor|Summary of changes') }}</h4>
<ul>
<li>
- {{ s__('StaticSiteEditor|A new branch was created:') }}
+ {{ s__('StaticSiteEditor|You created a new branch:') }}
<gl-link ref="branchLink" :href="branch.url">{{ branch.label }}</gl-link>
</li>
<li>
- {{ s__('StaticSiteEditor|Your changes were committed to it:') }}
- <gl-link ref="commitLink" :href="commit.url">{{ commit.label }}</gl-link>
- </li>
- <li>
- {{ s__('StaticSiteEditor|A merge request was created:') }}
+ {{ s__('StaticSiteEditor|You created a merge request:') }}
<gl-link ref="mergeRequestLink" :href="mergeRequest.url">{{
mergeRequest.label
}}</gl-link>
</li>
+ <li>
+ {{ s__('StaticSiteEditor|You added a commit:') }}
+ <gl-link ref="commitLink" :href="commit.url">{{ commit.label }}</gl-link>
+ </li>
</ul>
</div>
</div>
diff --git a/app/assets/javascripts/static_site_editor/components/static_site_editor.vue b/app/assets/javascripts/static_site_editor/components/static_site_editor.vue
index e711510ba44..8deae2f2c8a 100644
--- a/app/assets/javascripts/static_site_editor/components/static_site_editor.vue
+++ b/app/assets/javascripts/static_site_editor/components/static_site_editor.vue
@@ -12,8 +12,8 @@ export default {
Toolbar,
},
computed: {
- ...mapState(['content', 'isLoadingContent', 'isSavingChanges']),
- ...mapGetters(['isContentLoaded', 'contentChanged']),
+ ...mapState(['content', 'isLoadingContent', 'isSavingChanges', 'isContentLoaded']),
+ ...mapGetters(['contentChanged']),
},
mounted() {
this.loadContent();
diff --git a/app/assets/javascripts/static_site_editor/constants.js b/app/assets/javascripts/static_site_editor/constants.js
new file mode 100644
index 00000000000..5081d467016
--- /dev/null
+++ b/app/assets/javascripts/static_site_editor/constants.js
@@ -0,0 +1,12 @@
+import { s__ } from '~/locale';
+
+export const BRANCH_SUFFIX_COUNT = 8;
+export const DEFAULT_TARGET_BRANCH = 'master';
+
+export const SUBMIT_CHANGES_BRANCH_ERROR = s__('StaticSiteEditor|Branch could not be created.');
+export const SUBMIT_CHANGES_COMMIT_ERROR = s__(
+ 'StaticSiteEditor|Could not commit the content changes.',
+);
+export const SUBMIT_CHANGES_MERGE_REQUEST_ERROR = s__(
+ 'StaticSiteEditor|Could not create merge request.',
+);
diff --git a/app/assets/javascripts/static_site_editor/services/generate_branch_name.js b/app/assets/javascripts/static_site_editor/services/generate_branch_name.js
new file mode 100644
index 00000000000..f45ad616332
--- /dev/null
+++ b/app/assets/javascripts/static_site_editor/services/generate_branch_name.js
@@ -0,0 +1,8 @@
+import { BRANCH_SUFFIX_COUNT, DEFAULT_TARGET_BRANCH } from '../constants';
+
+const generateBranchSuffix = () => `${Date.now()}`.substr(BRANCH_SUFFIX_COUNT);
+
+const generateBranchName = (username, targetBranch = DEFAULT_TARGET_BRANCH) =>
+ `${username}-${targetBranch}-patch-${generateBranchSuffix()}`;
+
+export default generateBranchName;
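A usage sketch; because `Date.now()` currently yields a 13-digit string, `.substr(BRANCH_SUFFIX_COUNT)` keeps its trailing five digits as the suffix. The sample outputs are illustrative.

import generateBranchName from '~/static_site_editor/services/generate_branch_name';

generateBranchName('alice'); // e.g. 'alice-master-patch-93415'
generateBranchName('alice', 'release'); // e.g. 'alice-release-patch-93415'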
diff --git a/app/assets/javascripts/static_site_editor/services/submit_content_changes.js b/app/assets/javascripts/static_site_editor/services/submit_content_changes.js
index 6b0d8c74ff7..ff591e4b245 100644
--- a/app/assets/javascripts/static_site_editor/services/submit_content_changes.js
+++ b/app/assets/javascripts/static_site_editor/services/submit_content_changes.js
@@ -1,4 +1,76 @@
-// TODO implement
-const submitContentChanges = () => new Promise(resolve => setTimeout(resolve, 1000));
+import Api from '~/api';
+import { s__, sprintf } from '~/locale';
+import { convertObjectPropsToSnakeCase } from '~/lib/utils/common_utils';
+import generateBranchName from '~/static_site_editor/services/generate_branch_name';
+
+import {
+ DEFAULT_TARGET_BRANCH,
+ SUBMIT_CHANGES_BRANCH_ERROR,
+ SUBMIT_CHANGES_COMMIT_ERROR,
+ SUBMIT_CHANGES_MERGE_REQUEST_ERROR,
+} from '../constants';
+
+const createBranch = (projectId, branch) =>
+ Api.createBranch(projectId, {
+ ref: DEFAULT_TARGET_BRANCH,
+ branch,
+ }).catch(() => {
+ throw new Error(SUBMIT_CHANGES_BRANCH_ERROR);
+ });
+
+const commitContent = (projectId, message, branch, sourcePath, content) =>
+ Api.commitMultiple(
+ projectId,
+ convertObjectPropsToSnakeCase({
+ branch,
+ commitMessage: message,
+ actions: [
+ convertObjectPropsToSnakeCase({
+ action: 'update',
+ filePath: sourcePath,
+ content,
+ }),
+ ],
+ }),
+ ).catch(() => {
+ throw new Error(SUBMIT_CHANGES_COMMIT_ERROR);
+ });
+
+const createMergeRequest = (projectId, title, sourceBranch, targetBranch = DEFAULT_TARGET_BRANCH) =>
+ Api.createProjectMergeRequest(
+ projectId,
+ convertObjectPropsToSnakeCase({
+ title,
+ sourceBranch,
+ targetBranch,
+ }),
+ ).catch(() => {
+ throw new Error(SUBMIT_CHANGES_MERGE_REQUEST_ERROR);
+ });
+
+const submitContentChanges = ({ username, projectId, sourcePath, content }) => {
+ const branch = generateBranchName(username);
+ const mergeRequestTitle = sprintf(s__(`StaticSiteEditor|Update %{sourcePath} file`), {
+ sourcePath,
+ });
+ const meta = {};
+
+ return createBranch(projectId, branch)
+ .then(() => {
+ Object.assign(meta, { branch: { label: branch } });
+
+ return commitContent(projectId, mergeRequestTitle, branch, sourcePath, content);
+ })
+ .then(({ data: { short_id: label, web_url: url } }) => {
+ Object.assign(meta, { commit: { label, url } });
+
+ return createMergeRequest(projectId, mergeRequestTitle, branch);
+ })
+ .then(({ data: { iid: label, web_url: url } }) => {
+ Object.assign(meta, { mergeRequest: { label, url } });
+
+ return meta;
+ });
+};
export default submitContentChanges;
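A usage sketch of the branch → commit → merge request chain; the argument values are placeholders.

import submitContentChanges from '~/static_site_editor/services/submit_content_changes';

submitContentChanges({
  username: 'alice',
  projectId: 123,
  sourcePath: 'content/index.md',
  content: '# Updated page',
})
  .then(({ branch, commit, mergeRequest }) => {
    // commit and mergeRequest are { label, url }; branch only carries a label.
    console.log(branch.label, commit.url, mergeRequest.url);
  })
  .catch(error => {
    // One of the SUBMIT_CHANGES_*_ERROR messages, depending on the failing step.
    console.error(error.message);
  });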
diff --git a/app/assets/javascripts/static_site_editor/store/getters.js b/app/assets/javascripts/static_site_editor/store/getters.js
index 41256201c26..ebc68f8e9e6 100644
--- a/app/assets/javascripts/static_site_editor/store/getters.js
+++ b/app/assets/javascripts/static_site_editor/store/getters.js
@@ -1,2 +1,2 @@
-export const isContentLoaded = ({ originalContent }) => Boolean(originalContent);
+// eslint-disable-next-line import/prefer-default-export
export const contentChanged = ({ originalContent, content }) => originalContent !== content;
diff --git a/app/assets/javascripts/static_site_editor/store/mutations.js b/app/assets/javascripts/static_site_editor/store/mutations.js
index f98177bbc18..4727d04439c 100644
--- a/app/assets/javascripts/static_site_editor/store/mutations.js
+++ b/app/assets/javascripts/static_site_editor/store/mutations.js
@@ -6,6 +6,7 @@ export default {
},
[types.RECEIVE_CONTENT_SUCCESS](state, { title, content }) {
state.isLoadingContent = false;
+ state.isContentLoaded = true;
state.title = title;
state.content = content;
state.originalContent = content;
diff --git a/app/assets/javascripts/static_site_editor/store/state.js b/app/assets/javascripts/static_site_editor/store/state.js
index e457fde591a..d48cc8ed1a4 100644
--- a/app/assets/javascripts/static_site_editor/store/state.js
+++ b/app/assets/javascripts/static_site_editor/store/state.js
@@ -6,10 +6,14 @@ const createState = (initialState = {}) => ({
isLoadingContent: false,
isSavingChanges: false,
+ isContentLoaded: false,
+
originalContent: '',
content: '',
title: '',
+ savedContentMeta: null,
+
...initialState,
});
diff --git a/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_terraform_plan.vue b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_terraform_plan.vue
new file mode 100644
index 00000000000..edf90085a5b
--- /dev/null
+++ b/app/assets/javascripts/vue_merge_request_widget/components/mr_widget_terraform_plan.vue
@@ -0,0 +1,152 @@
+<script>
+import { __ } from '~/locale';
+import { GlIcon, GlLoadingIcon, GlSprintf } from '@gitlab/ui';
+import axios from '~/lib/utils/axios_utils';
+import CiIcon from '../../vue_shared/components/ci_icon.vue';
+import flash from '~/flash';
+import Poll from '~/lib/utils/poll';
+import Visibility from 'visibilityjs';
+
+export default {
+ name: 'MRWidgetTerraformPlan',
+ components: {
+ CiIcon,
+ GlIcon,
+ GlLoadingIcon,
+ GlSprintf,
+ },
+ props: {
+ endpoint: {
+ type: String,
+ required: true,
+ },
+ },
+ data() {
+ return {
+ loading: true,
+ plans: {},
+ };
+ },
+ computed: {
+ addNum() {
+ return Number(this.plan.create);
+ },
+ changeNum() {
+ return Number(this.plan.update);
+ },
+ deleteNum() {
+ return Number(this.plan.delete);
+ },
+ iconStatusObj() {
+ return {
+ group: 'warning',
+ icon: 'status_warning',
+ };
+ },
+ logUrl() {
+ return this.plan.job_path;
+ },
+ plan() {
+ return this.plans['tfplan.json'] || {};
+ },
+ validPlanValues() {
+ return this.addNum + this.changeNum + this.deleteNum >= 0;
+ },
+ },
+ created() {
+ this.fetchPlans();
+ },
+ methods: {
+ fetchPlans() {
+ this.loading = true;
+
+ const poll = new Poll({
+ resource: {
+ fetchPlans: () => axios.get(this.endpoint),
+ },
+ data: this.endpoint,
+ method: 'fetchPlans',
+ successCallback: ({ data }) => {
+ this.plans = data;
+ this.loading = false;
+ },
+ errorCallback: () => {
+ this.plans = {};
+ this.loading = false;
+ flash(__('An error occurred while loading terraform report'));
+ },
+ });
+
+ if (!Visibility.hidden()) {
+ poll.makeRequest();
+ }
+
+ Visibility.change(() => {
+ if (!Visibility.hidden()) {
+ poll.restart();
+ } else {
+ poll.stop();
+ }
+ });
+ },
+ },
+};
+</script>
+
+<template>
+ <section class="mr-widget-section">
+ <div class="mr-widget-body media d-flex flex-row">
+ <span class="append-right-default align-self-start align-self-lg-center">
+ <ci-icon :status="iconStatusObj" :size="24" />
+ </span>
+
+ <div class="d-flex flex-fill flex-column flex-md-row">
+ <div class="terraform-mr-plan-text normal d-flex flex-column flex-lg-row">
+ <p class="m-0 pr-1">{{ __('A terraform report was generated in your pipelines.') }}</p>
+
+ <gl-loading-icon v-if="loading" size="md" />
+
+ <p v-else-if="validPlanValues" class="m-0">
+ <gl-sprintf
+ :message="
+ __(
+ 'Reported Resource Changes: %{addNum} to add, %{changeNum} to change, %{deleteNum} to delete',
+ )
+ "
+ >
+ <template #addNum>
+ <strong>{{ addNum }}</strong>
+ </template>
+
+ <template #changeNum>
+ <strong>{{ changeNum }}</strong>
+ </template>
+
+ <template #deleteNum>
+ <strong>{{ deleteNum }}</strong>
+ </template>
+ </gl-sprintf>
+ </p>
+
+ <p v-else class="m-0">{{ __('Changes are unknown') }}</p>
+ </div>
+
+ <div class="terraform-mr-plan-actions">
+ <a
+ v-if="logUrl"
+ :href="logUrl"
+ target="_blank"
+ data-track-event="click_terraform_mr_plan_button"
+ data-track-label="mr_widget_terraform_mr_plan_button"
+ data-track-property="terraform_mr_plan_button"
+ class="btn btn-sm js-terraform-report-link"
+ rel="noopener"
+ >
+ {{ __('View full log') }}
+ <gl-icon name="external-link" />
+ </a>
+ </div>
+ </div>
+ </div>
+ </section>
+</template>
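The widget keys into the polled payload by the `tfplan.json` file name; a sketch of the response shape it expects, inferred from how `plan()`, the count computeds, and `logUrl` read it. Field values here are assumptions.

// GET <terraformReportsPath> is polled until data arrives; a payload shaped
// like this satisfies the component:
const exampleResponse = {
  'tfplan.json': {
    create: 1, // addNum
    update: 2, // changeNum
    delete: 0, // deleteNum
    job_path: '/root/project/-/jobs/42', // "View full log" link
  },
};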
diff --git a/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue b/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue
index 084deee042b..05f73c4cdaf 100644
--- a/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue
+++ b/app/assets/javascripts/vue_merge_request_widget/mr_widget_options.vue
@@ -36,6 +36,7 @@ import CheckingState from './components/states/mr_widget_checking.vue';
import eventHub from './event_hub';
import notify from '~/lib/utils/notify';
import SourceBranchRemovalStatus from './components/source_branch_removal_status.vue';
+import TerraformPlan from './components/mr_widget_terraform_plan.vue';
import GroupedTestReportsApp from '../reports/components/grouped_test_reports_app.vue';
import { setFaviconOverlay } from '../lib/utils/common_utils';
@@ -74,6 +75,7 @@ export default {
'mr-widget-rebase': RebaseState,
SourceBranchRemovalStatus,
GroupedTestReportsApp,
+ TerraformPlan,
},
props: {
mrData: {
@@ -212,8 +214,6 @@ export default {
return new MRWidgetService(this.getServiceEndpoints(store));
},
checkStatus(cb, isRebased) {
- if (document.visibilityState !== 'visible') return Promise.resolve();
-
return this.service
.checkStatus()
.then(({ data }) => {
@@ -236,10 +236,10 @@ export default {
initPolling() {
this.pollingInterval = new SmartInterval({
callback: this.checkStatus,
- startingInterval: 10000,
- maxInterval: 30000,
- hiddenInterval: 120000,
- incrementByFactorOf: 5000,
+ startingInterval: 10 * 1000,
+ maxInterval: 240 * 1000,
+ hiddenInterval: window.gon?.features?.widgetVisibilityPolling && 360 * 1000,
+ incrementByFactorOf: 2,
});
},
initDeploymentsPolling() {
@@ -251,10 +251,9 @@ export default {
deploymentsPoll(callback) {
return new SmartInterval({
callback,
- startingInterval: 30000,
- maxInterval: 120000,
- hiddenInterval: 240000,
- incrementByFactorOf: 15000,
+ startingInterval: 30 * 1000,
+ maxInterval: 240 * 1000,
+ incrementByFactorOf: 4,
immediateExecution: true,
});
},
@@ -379,6 +378,8 @@ export default {
:endpoint="mr.testResultsPath"
/>
+ <terraform-plan v-if="mr.terraformReportsPath" :endpoint="mr.terraformReportsPath" />
+
<div class="mr-widget-section">
<component :is="componentName" :mr="mr" :service="service" />
diff --git a/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js b/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js
index 321b9270dde..9f001dda540 100644
--- a/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js
+++ b/app/assets/javascripts/vue_merge_request_widget/stores/mr_widget_store.js
@@ -101,6 +101,7 @@ export default class MergeRequestStore {
this.isPipelineActive = data.pipeline ? data.pipeline.active : false;
this.isPipelineBlocked = pipelineStatus ? pipelineStatus.group === 'manual' : false;
this.ciStatusFaviconPath = pipelineStatus ? pipelineStatus.favicon : null;
+ this.terraformReportsPath = data.terraform_reports_path;
this.testResultsPath = data.test_reports_path;
this.exposedArtifactsPath = data.exposed_artifacts_path;
this.cancelAutoMergePath = data.cancel_auto_merge_path;
diff --git a/app/assets/javascripts/vue_shared/components/file_row.vue b/app/assets/javascripts/vue_shared/components/file_row.vue
index e3a606571c0..0a5cc7b693c 100644
--- a/app/assets/javascripts/vue_shared/components/file_row.vue
+++ b/app/assets/javascripts/vue_shared/components/file_row.vue
@@ -111,6 +111,7 @@ export default {
v-else
:class="fileClass"
:title="textForTitle"
+ :data-level="level"
class="file-row"
role="button"
@click="clickFile"
diff --git a/app/assets/javascripts/vue_shared/components/form/form_footer_actions.vue b/app/assets/javascripts/vue_shared/components/form/form_footer_actions.vue
new file mode 100644
index 00000000000..74f988476e3
--- /dev/null
+++ b/app/assets/javascripts/vue_shared/components/form/form_footer_actions.vue
@@ -0,0 +1,7 @@
+<template functional>
+ <footer class="form-actions d-flex justify-content-between">
+ <div><slot name="prepend"></slot></div>
+ <div><slot></slot></div>
+ <div><slot name="append"></slot></div>
+ </footer>
+</template>
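A usage sketch of the new functional footer wrapper; the default slot sits between the optional `prepend` and `append` slots, and the buttons here are placeholders.

<form-footer-actions>
  <template #prepend>
    <button type="button">{{ __('Delete') }}</button>
  </template>
  <button type="submit">{{ __('Save changes') }}</button>
  <template #append>
    <button type="button">{{ __('Cancel') }}</button>
  </template>
</form-footer-actions>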
diff --git a/app/assets/javascripts/vue_shared/components/project_selector/project_selector.vue b/app/assets/javascripts/vue_shared/components/project_selector/project_selector.vue
index 30a9633b6dc..fd45ac52647 100644
--- a/app/assets/javascripts/vue_shared/components/project_selector/project_selector.vue
+++ b/app/assets/javascripts/vue_shared/components/project_selector/project_selector.vue
@@ -80,7 +80,7 @@ export default {
@input="onInput"
/>
<div class="d-flex flex-column">
- <gl-loading-icon v-if="showLoadingIndicator" :size="1" class="py-2 px-4" />
+ <gl-loading-icon v-if="showLoadingIndicator" size="sm" class="py-2 px-4" />
<gl-infinite-scroll
:max-list-height="402"
:fetched-items="projectSearchResults.length"
diff --git a/app/assets/stylesheets/components/dashboard_skeleton.scss b/app/assets/stylesheets/components/dashboard_skeleton.scss
index a104d035a9a..2e2c1fefc79 100644
--- a/app/assets/stylesheets/components/dashboard_skeleton.scss
+++ b/app/assets/stylesheets/components/dashboard_skeleton.scss
@@ -4,6 +4,8 @@
}
.dashboard-card {
+ @include gl-cursor-grab;
+
&-header {
&-warning {
background-color: $orange-100;
diff --git a/app/assets/stylesheets/framework/buttons.scss b/app/assets/stylesheets/framework/buttons.scss
index 6257ee3ae8e..ecf2097dc87 100644
--- a/app/assets/stylesheets/framework/buttons.scss
+++ b/app/assets/stylesheets/framework/buttons.scss
@@ -120,7 +120,7 @@
}
@mixin btn-white {
- @include btn-color($white, $border-color, $white-normal, $border-white-normal, $white-dark, $border-gray-dark, $gl-text-color);
+ @include btn-color($white, $border-color, $white-normal, $border-white-normal, $white-dark, $border-white-normal, $gl-text-color);
}
@mixin btn-with-margin {
@@ -365,7 +365,7 @@
.active {
box-shadow: $gl-btn-active-background;
- border: 1px solid $border-gray-dark !important;
+ border: 1px solid $border-white-normal !important;
background-color: $btn-active-gray-light !important;
}
}
diff --git a/app/assets/stylesheets/framework/filters.scss b/app/assets/stylesheets/framework/filters.scss
index 9a473876fa0..5f6a26d0a14 100644
--- a/app/assets/stylesheets/framework/filters.scss
+++ b/app/assets/stylesheets/framework/filters.scss
@@ -96,8 +96,8 @@
}
.name {
- background-color: $filter-name-resting-color;
- color: $filter-name-text-color;
+ background-color: $white-normal;
+ color: $gl-text-color-secondary;
border-radius: 2px 0 0 2px;
margin-right: 1px;
text-transform: capitalize;
@@ -105,7 +105,7 @@
.operator {
background-color: $white-normal;
- color: $filter-value-text-color;
+ color: $gl-text-color;
margin-right: 1px;
}
@@ -113,7 +113,7 @@
display: flex;
align-items: center;
background-color: $white-normal;
- color: $filter-value-text-color;
+ color: $gl-text-color;
border-radius: 0 2px 2px 0;
margin-right: 5px;
padding-right: 8px;
@@ -152,7 +152,7 @@
.filtered-search-token .selected,
.filtered-search-term .selected {
.name {
- background-color: $filter-name-selected-color;
+ background-color: $gray-200;
}
.operator {
diff --git a/app/assets/stylesheets/framework/typography.scss b/app/assets/stylesheets/framework/typography.scss
index 69aed2fc20a..816dbc6931c 100644
--- a/app/assets/stylesheets/framework/typography.scss
+++ b/app/assets/stylesheets/framework/typography.scss
@@ -86,13 +86,13 @@
line-height: 10px;
color: $gl-gray-700;
vertical-align: middle;
- background-color: $kdb-bg;
+ background-color: $gray-50;
border-width: 1px;
border-style: solid;
- border-color: $gl-gray-200 $gl-gray-200 $kdb-border-bottom;
+ border-color: $gray-200 $gray-200 $gray-400;
border-image: none;
border-radius: 3px;
- box-shadow: 0 -1px 0 $kdb-shadow inset;
+ box-shadow: 0 -1px 0 $gray-400 inset;
}
h1 {
diff --git a/app/assets/stylesheets/framework/variables.scss b/app/assets/stylesheets/framework/variables.scss
index 65efbabaa4f..c23623005b0 100644
--- a/app/assets/stylesheets/framework/variables.scss
+++ b/app/assets/stylesheets/framework/variables.scss
@@ -329,7 +329,6 @@ $border-white-normal: darken($white-normal, $darken-border-factor);
$border-gray-light: darken($gray-light, $darken-border-factor);
$border-gray-normal: darken($gray-normal, $darken-border-factor);
$border-gray-normal-dashed: darken($gray-normal, $darken-border-dashed-factor);
-$border-gray-dark: darken($white-normal, $darken-border-factor);
/*
* UI elements
@@ -350,13 +349,13 @@ $gl-font-size-small: 12px;
$gl-font-size-large: 16px;
$gl-font-weight-normal: 400;
$gl-font-weight-bold: 600;
-$gl-text-color: #2e2e2e;
-$gl-text-color-secondary: #707070;
-$gl-text-color-tertiary: #919191;
+$gl-text-color: $gray-900;
+$gl-text-color-secondary: $gray-700;
+$gl-text-color-tertiary: $gray-600;
$gl-text-color-quaternary: #d6d6d6;
-$gl-text-color-inverted: rgba(255, 255, 255, 1);
+$gl-text-color-inverted: $white;
$gl-text-color-secondary-inverted: rgba(255, 255, 255, 0.85);
-$gl-text-color-disabled: #919191;
+$gl-text-color-disabled: $gray-600;
$gl-grayish-blue: #7f8fa4;
$gl-gray-dark: #313236;
$gl-gray-light: #5c5c5c;
@@ -485,7 +484,7 @@ $line-removed-dark: #fac5cd;
$line-number-old: #f9d7dc;
$line-number-new: #ddfbe6;
$line-number-select: #fbf2da;
-$line-target-blue: #f6faff;
+$line-target-blue: $blue-50;
$line-select-yellow: #fcf8e7;
$line-select-yellow-dark: #f0e2bd;
$dark-diff-match-bg: rgba(255, 255, 255, 0.3);
@@ -698,7 +697,7 @@ $logs-p-color: #333;
*/
$input-height: 34px;
$input-danger-bg: #f2dede;
-$input-group-addon-bg: #f7f8fa;
+$input-group-addon-bg: $gray-50;
$gl-field-focus-shadow: rgba(0, 0, 0, 0.075);
$gl-field-focus-shadow-error: rgba($red-500, 0.6);
$input-short-width: 200px;
@@ -774,9 +773,6 @@ $select2-drop-shadow2: rgba(31, 37, 50, 0.317647);
/*
* Typography
*/
-$kdb-bg: #fcfcfc;
-$kdb-border-bottom: #bbb;
-$kdb-shadow: #bbb;
$body-text-shadow: rgba(255, 255, 255, 0.01);
/*
@@ -801,20 +797,6 @@ CI variable lists
$ci-variable-remove-button-width: calc(1em + #{2 * $gl-padding});
/*
-Filtered Search
-*/
-$filter-name-resting-color: #f8f8f8;
-$filter-name-text-color: rgba(0, 0, 0, 0.55);
-$filter-value-text-color: rgba(0, 0, 0, 0.85);
-$filter-name-selected-color: #ebebeb;
-$filter-value-selected-color: #d7d7d7;
-
-/*
-Animation Functions
-*/
-$dropdown-animation-timing: cubic-bezier(0.23, 1, 0.32, 1);
-
-/*
GitLab Plans
*/
$gl-gold-plan: #d4af37;
diff --git a/app/assets/stylesheets/pages/boards.scss b/app/assets/stylesheets/pages/boards.scss
index 78d4383ce28..11291dad28b 100644
--- a/app/assets/stylesheets/pages/boards.scss
+++ b/app/assets/stylesheets/pages/boards.scss
@@ -475,7 +475,7 @@
}
.board-card {
- border: 1px solid $border-gray-dark;
+ border: 1px solid $border-white-normal;
box-shadow: 0 1px 2px rgba($issue-boards-card-shadow, 0.3);
cursor: pointer;
}
diff --git a/app/assets/stylesheets/pages/commits.scss b/app/assets/stylesheets/pages/commits.scss
index 1a07600769c..230f390aa90 100644
--- a/app/assets/stylesheets/pages/commits.scss
+++ b/app/assets/stylesheets/pages/commits.scss
@@ -131,7 +131,7 @@
color: $gl-text-color-secondary;
padding: 1px $gl-padding-4;
cursor: pointer;
- border: 1px solid $border-gray-dark;
+ border: 1px solid $border-white-normal;
border-radius: $border-radius-default;
margin-left: 5px;
font-size: 12px;
diff --git a/app/assets/stylesheets/pages/issuable.scss b/app/assets/stylesheets/pages/issuable.scss
index c48f4b0622e..85fdcb753b4 100644
--- a/app/assets/stylesheets/pages/issuable.scss
+++ b/app/assets/stylesheets/pages/issuable.scss
@@ -426,7 +426,7 @@
height: $sidebar-toggle-height;
margin-left: 0;
padding-left: 0;
- border-bottom: 1px solid $border-gray-dark;
+ border-bottom: 1px solid $border-white-normal;
}
a.gutter-toggle {
diff --git a/app/assets/stylesheets/pages/prometheus.scss b/app/assets/stylesheets/pages/prometheus.scss
index 6e5daef3e7d..af0afa9cc3b 100644
--- a/app/assets/stylesheets/pages/prometheus.scss
+++ b/app/assets/stylesheets/pages/prometheus.scss
@@ -84,13 +84,6 @@
border-radius: $border-radius-default;
}
-.prometheus-graph-header {
- display: flex;
- align-items: center;
- justify-content: space-between;
- margin-bottom: $gl-padding-8;
-}
-
.alert-current-setting {
max-width: 240px;
diff --git a/app/assets/stylesheets/pages/tree.scss b/app/assets/stylesheets/pages/tree.scss
index 142078588df..22b5859e297 100644
--- a/app/assets/stylesheets/pages/tree.scss
+++ b/app/assets/stylesheets/pages/tree.scss
@@ -130,8 +130,8 @@
&.selected {
td {
background: $white-normal;
- border-top: 1px solid $border-gray-dark;
- border-bottom: 1px solid $border-gray-dark;
+ border-top: 1px solid $border-white-normal;
+ border-bottom: 1px solid $border-white-normal;
}
}
}
diff --git a/app/assets/stylesheets/utilities.scss b/app/assets/stylesheets/utilities.scss
index 91bed4fc9f2..2a811e08fd3 100644
--- a/app/assets/stylesheets/utilities.scss
+++ b/app/assets/stylesheets/utilities.scss
@@ -54,6 +54,11 @@
.mh-50vh { max-height: 50vh; }
+.min-width-0 {
+ // By default, flex items don't shrink below their minimum content size. To change this, set the item's min-width to 0.
+ min-width: 0;
+}
+
.font-size-inherit { font-size: inherit; }
.gl-w-8 { width: px-to-rem($grid-size); }
.gl-w-16 { width: px-to-rem($grid-size * 2); }
diff --git a/app/controllers/admin/application_settings_controller.rb b/app/controllers/admin/application_settings_controller.rb
index 210d488f5a3..16254c74ba4 100644
--- a/app/controllers/admin/application_settings_controller.rb
+++ b/app/controllers/admin/application_settings_controller.rb
@@ -219,6 +219,7 @@ class Admin::ApplicationSettingsController < Admin::ApplicationController
:domain_blacklist_file,
:raw_blob_request_limit,
:namespace_storage_size_limit,
+ :issues_create_limit,
disabled_oauth_sign_in_sources: [],
import_sources: [],
repository_storages: [],
diff --git a/app/controllers/groups/settings/ci_cd_controller.rb b/app/controllers/groups/settings/ci_cd_controller.rb
index 989013df8d4..6b842fc9fe1 100644
--- a/app/controllers/groups/settings/ci_cd_controller.rb
+++ b/app/controllers/groups/settings/ci_cd_controller.rb
@@ -114,7 +114,7 @@ module Groups
end
def deploy_token_params
- params.require(:deploy_token).permit(:name, :expires_at, :read_repository, :read_registry, :username)
+ params.require(:deploy_token).permit(:name, :expires_at, :read_repository, :read_registry, :write_registry, :username)
end
end
end
diff --git a/app/controllers/ide_controller.rb b/app/controllers/ide_controller.rb
index 8a838db04f9..bffbdf01f8f 100644
--- a/app/controllers/ide_controller.rb
+++ b/app/controllers/ide_controller.rb
@@ -6,6 +6,10 @@ class IdeController < ApplicationController
include ClientsidePreviewCSP
include StaticObjectExternalStorageCSP
+ before_action do
+ push_frontend_feature_flag(:webide_dark_theme)
+ end
+
def index
Gitlab::UsageDataCounters::WebIdeCounter.increment_views_count
end
diff --git a/app/controllers/import/github_controller.rb b/app/controllers/import/github_controller.rb
index c418b11ab13..34af1ecd6a5 100644
--- a/app/controllers/import/github_controller.rb
+++ b/app/controllers/import/github_controller.rb
@@ -9,6 +9,7 @@ class Import::GithubController < Import::BaseController
before_action :expire_etag_cache, only: [:status, :create]
rescue_from Octokit::Unauthorized, with: :provider_unauthorized
+ rescue_from Octokit::TooManyRequests, with: :provider_rate_limit
def new
if !ci_cd_only? && github_import_configured? && logged_in_with_provider?
@@ -142,6 +143,13 @@ class Import::GithubController < Import::BaseController
alert: "Access denied to your #{Gitlab::ImportSources.title(provider.to_s)} account."
end
+ def provider_rate_limit(exception)
+ reset_time = Time.at(exception.response_headers['x-ratelimit-reset'].to_i)
+ session[access_token_key] = nil
+ redirect_to new_import_url,
+ alert: _("GitHub API rate limit exceeded. Try again after %{reset_time}") % { reset_time: reset_time }
+ end
+
def access_token_key
:"#{provider}_access_token"
end
@@ -180,7 +188,7 @@ class Import::GithubController < Import::BaseController
end
def client_options
- {}
+ { wait_for_rate_limit_reset: false }
end
def extra_import_params
diff --git a/app/controllers/projects/environments_controller.rb b/app/controllers/projects/environments_controller.rb
index e51a5c7b84d..09dc4d118a1 100644
--- a/app/controllers/projects/environments_controller.rb
+++ b/app/controllers/projects/environments_controller.rb
@@ -14,9 +14,7 @@ class Projects::EnvironmentsController < Projects::ApplicationController
before_action :expire_etag_cache, only: [:index], unless: -> { request.format.json? }
before_action only: [:metrics, :additional_metrics, :metrics_dashboard] do
push_frontend_feature_flag(:prometheus_computed_alerts)
- end
- before_action do
- push_frontend_feature_flag(:auto_stop_environments, default_enabled: true)
+ push_frontend_feature_flag(:metrics_dashboard_annotations)
end
after_action :expire_etag_cache, only: [:cancel_auto_stop]
diff --git a/app/controllers/projects/import/jira_controller.rb b/app/controllers/projects/import/jira_controller.rb
index ca427928d85..26d9b4b223f 100644
--- a/app/controllers/projects/import/jira_controller.rb
+++ b/app/controllers/projects/import/jira_controller.rb
@@ -3,14 +3,18 @@
module Projects
module Import
class JiraController < Projects::ApplicationController
+ before_action :authenticate_user!
+ before_action :check_issues_available!
+ before_action :authorize_read_project!
before_action :jira_import_enabled?
before_action :jira_integration_configured?
+ before_action :authorize_admin_project!, only: [:import]
def show
@is_jira_configured = @project.jira_service.present?
return if Feature.enabled?(:jira_issue_import_vue, @project)
- unless @project.latest_jira_import&.in_progress?
+ if !@project.latest_jira_import&.in_progress? && current_user&.can?(:admin_project, @project)
jira_client = @project.jira_service.client
jira_projects = jira_client.Project.all
diff --git a/app/controllers/projects/issues_controller.rb b/app/controllers/projects/issues_controller.rb
index f552c471eb2..51ad8edb012 100644
--- a/app/controllers/projects/issues_controller.rb
+++ b/app/controllers/projects/issues_controller.rb
@@ -42,15 +42,14 @@ class Projects::IssuesController < Projects::ApplicationController
before_action :authorize_import_issues!, only: [:import_csv]
before_action :authorize_download_code!, only: [:related_branches]
+ # Limit the number of issues created per minute
+ before_action :create_rate_limit, only: [:create]
+
before_action do
push_frontend_feature_flag(:vue_issuable_sidebar, project.group)
push_frontend_feature_flag(:save_issuable_health_status, project.group, default_enabled: true)
end
- before_action only: :show do
- push_frontend_feature_flag(:sort_discussions, @project)
- end
-
around_action :allow_gitaly_ref_name_caching, only: [:discussions]
respond_to :html
@@ -296,6 +295,22 @@ class Projects::IssuesController < Projects::ApplicationController
# 3. https://gitlab.com/gitlab-org/gitlab-foss/issues/42426
Gitlab::QueryLimiting.whitelist('https://gitlab.com/gitlab-org/gitlab-foss/issues/42422')
end
+
+ private
+
+ def create_rate_limit
+ key = :issues_create
+
+ if rate_limiter.throttled?(key, scope: [@project, @current_user])
+ rate_limiter.log_request(request, "#{key}_request_limit".to_sym, current_user)
+
+ render plain: _('This endpoint has been requested too many times. Try again later.'), status: :too_many_requests
+ end
+ end
+
+ def rate_limiter
+ ::Gitlab::ApplicationRateLimiter
+ end
end
Projects::IssuesController.prepend_if_ee('EE::Projects::IssuesController')
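For context on the create_rate_limit hook above: Gitlab::ApplicationRateLimiter.throttled? is called with a key plus a scope of [@project, @current_user], and the threshold comes from the new issues_create_limit setting (default 300, see application_setting_implementation.rb further down). A rough fixed-window sketch of a limiter with that shape follows; it is illustrative only, not GitLab's implementation, and the class name below is made up:

# Hypothetical in-memory fixed-window limiter; the real ApplicationRateLimiter keeps
# its counters elsewhere, this only mirrors the throttled?(key, scope:) shape.
class SimpleRateLimiter
  def initialize(limit:, interval: 60)
    @limit = limit
    @interval = interval
    @counters = Hash.new { |hash, key| hash[key] = [0, Time.now] }
  end

  # True once `scope` has exceeded `limit` hits for `key` inside the current window.
  def throttled?(key, scope:)
    count, window_start = @counters[[key, scope]]
    count, window_start = 0, Time.now if Time.now - window_start > @interval
    count += 1
    @counters[[key, scope]] = [count, window_start]
    count > @limit
  end
end

limiter = SimpleRateLimiter.new(limit: 300)
limiter.throttled?(:issues_create, scope: %w[project-1 user-7]) # => false until the 301st call in the window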
diff --git a/app/controllers/projects/merge_requests_controller.rb b/app/controllers/projects/merge_requests_controller.rb
index 26de200a1c1..89de40006ff 100644
--- a/app/controllers/projects/merge_requests_controller.rb
+++ b/app/controllers/projects/merge_requests_controller.rb
@@ -24,16 +24,13 @@ class Projects::MergeRequestsController < Projects::MergeRequests::ApplicationCo
push_frontend_feature_flag(:single_mr_diff_view, @project, default_enabled: true)
push_frontend_feature_flag(:suggest_pipeline) if experiment_enabled?(:suggest_pipeline)
push_frontend_feature_flag(:code_navigation, @project)
+ push_frontend_feature_flag(:widget_visibility_polling, @project, default_enabled: true)
end
before_action do
push_frontend_feature_flag(:vue_issuable_sidebar, @project.group)
end
- before_action only: :show do
- push_frontend_feature_flag(:sort_discussions, @project)
- end
-
around_action :allow_gitaly_ref_name_caching, only: [:index, :show, :discussions]
def index
diff --git a/app/controllers/projects/pages_controller.rb b/app/controllers/projects/pages_controller.rb
index 18a171700e9..2a8bc823931 100644
--- a/app/controllers/projects/pages_controller.rb
+++ b/app/controllers/projects/pages_controller.rb
@@ -10,7 +10,7 @@ class Projects::PagesController < Projects::ApplicationController
# rubocop: disable CodeReuse/ActiveRecord
def show
- @domains = @project.pages_domains.order(:domain)
+ @domains = @project.pages_domains.order(:domain).present(current_user: current_user)
end
# rubocop: enable CodeReuse/ActiveRecord
diff --git a/app/controllers/projects/pages_domains_controller.rb b/app/controllers/projects/pages_domains_controller.rb
index 5a81a064048..cdf6f5ce828 100644
--- a/app/controllers/projects/pages_domains_controller.rb
+++ b/app/controllers/projects/pages_domains_controller.rb
@@ -26,6 +26,12 @@ class Projects::PagesDomainsController < Projects::ApplicationController
redirect_to project_pages_domain_path(@project, @domain)
end
+ def retry_auto_ssl
+ PagesDomains::RetryAcmeOrderService.new(@domain.pages_domain).execute
+
+ redirect_to project_pages_domain_path(@project, @domain)
+ end
+
def edit
redirect_to project_pages_domain_path(@project, @domain)
end
@@ -82,6 +88,6 @@ class Projects::PagesDomainsController < Projects::ApplicationController
end
def domain
- @domain ||= @project.pages_domains.find_by_domain!(params[:id].to_s)
+ @domain ||= @project.pages_domains.find_by_domain!(params[:id].to_s).present(current_user: current_user)
end
end
diff --git a/app/controllers/projects/settings/ci_cd_controller.rb b/app/controllers/projects/settings/ci_cd_controller.rb
index 5feb3e019c2..a0f98d8f1d2 100644
--- a/app/controllers/projects/settings/ci_cd_controller.rb
+++ b/app/controllers/projects/settings/ci_cd_controller.rb
@@ -93,7 +93,7 @@ module Projects
end
def deploy_token_params
- params.require(:deploy_token).permit(:name, :expires_at, :read_repository, :read_registry, :username)
+ params.require(:deploy_token).permit(:name, :expires_at, :read_repository, :read_registry, :write_registry, :username)
end
def run_autodevops_pipeline(service)
diff --git a/app/controllers/projects/static_site_editor_controller.rb b/app/controllers/projects/static_site_editor_controller.rb
index 597bfccf422..98ec2335899 100644
--- a/app/controllers/projects/static_site_editor_controller.rb
+++ b/app/controllers/projects/static_site_editor_controller.rb
@@ -1,10 +1,21 @@
# frozen_string_literal: true
class Projects::StaticSiteEditorController < Projects::ApplicationController
+ include ExtractsPath
layout 'fullscreen'
prepend_before_action :authenticate_user!, only: [:show]
+ before_action :assign_ref_and_path, only: [:show]
def show
+ @config = Gitlab::StaticSiteEditor::Config.new(@repository, @ref, @path, params[:return_url])
+ end
+
+ private
+
+ def assign_ref_and_path
+ @ref, @path = extract_ref(params[:id])
+
+ render_404 if @ref.blank? || @path.blank?
end
end
diff --git a/app/graphql/resolvers/merge_requests_resolver.rb b/app/graphql/resolvers/merge_requests_resolver.rb
index cda27890d6b..25121dce005 100644
--- a/app/graphql/resolvers/merge_requests_resolver.rb
+++ b/app/graphql/resolvers/merge_requests_resolver.rb
@@ -20,8 +20,17 @@ module Resolvers
args[:iids] ||= [args[:iid]].compact
- args[:iids].map { |iid| batch_load(iid) }
- .select(&:itself) # .compact doesn't work on BatchLoader
+ if args[:iids].any?
+ batch_load_merge_requests(args[:iids])
+ else
+ args[:project_id] = project.id
+
+ MergeRequestsFinder.new(context[:current_user], args).execute
+ end
+ end
+
+ def batch_load_merge_requests(iids)
+ iids.map { |iid| batch_load(iid) }.select(&:itself) # .compact doesn't work on BatchLoader
end
# rubocop: disable CodeReuse/ActiveRecord
diff --git a/app/graphql/resolvers/projects/jira_imports_resolver.rb b/app/graphql/resolvers/projects/jira_imports_resolver.rb
index b0784b3cdf7..25361c068d9 100644
--- a/app/graphql/resolvers/projects/jira_imports_resolver.rb
+++ b/app/graphql/resolvers/projects/jira_imports_resolver.rb
@@ -16,7 +16,7 @@ module Resolvers
def authorized_resource?(project)
return false unless project.jira_issues_import_feature_flag_enabled?
- Ability.allowed?(context[:current_user], :admin_project, project)
+ context[:current_user].present? && Ability.allowed?(context[:current_user], :read_project, project)
end
end
end
diff --git a/app/mailers/emails/pages_domains.rb b/app/mailers/emails/pages_domains.rb
index 1caca6b3e44..6c3dcf8746b 100644
--- a/app/mailers/emails/pages_domains.rb
+++ b/app/mailers/emails/pages_domains.rb
@@ -41,5 +41,16 @@ module Emails
subject: subject("ACTION REQUIRED: Verification failed for GitLab Pages domain '#{domain.domain}'")
)
end
+
+ def pages_domain_auto_ssl_failed_email(domain, recipient)
+ @domain = domain
+ @project = domain.project
+
+ subject_text = _("ACTION REQUIRED: Something went wrong while obtaining the Let's Encrypt certificate for GitLab Pages domain '%{domain}'") % { domain: domain.domain }
+ mail(
+ to: recipient.notification_email_for(@project.group),
+ subject: subject(subject_text)
+ )
+ end
end
end
diff --git a/app/models/application_setting_implementation.rb b/app/models/application_setting_implementation.rb
index 920ad3286d1..c96f086684f 100644
--- a/app/models/application_setting_implementation.rb
+++ b/app/models/application_setting_implementation.rb
@@ -79,6 +79,7 @@ module ApplicationSettingImplementation
housekeeping_gc_period: 200,
housekeeping_incremental_repack_period: 10,
import_sources: Settings.gitlab['import_sources'],
+ issues_create_limit: 300,
local_markdown_version: 0,
max_artifacts_size: Settings.artifacts['max_size'],
max_attachment_size: Settings.gitlab['max_attachment_size'],
diff --git a/app/models/ci/job_artifact.rb b/app/models/ci/job_artifact.rb
index ef0701b3874..c4ac10814a9 100644
--- a/app/models/ci/job_artifact.rb
+++ b/app/models/ci/job_artifact.rb
@@ -73,12 +73,14 @@ module Ci
validates :file_format, presence: true, unless: :trace?, on: :create
validate :valid_file_format?, unless: :trace?, on: :create
- before_save :set_size, if: :file_changed?
- update_project_statistics project_statistics_name: :build_artifacts_size
+ before_save :set_size, if: :file_changed?
+ before_save :set_file_store, if: ->(job_artifact) { job_artifact.file_store.nil? }
after_save :update_file_store, if: :saved_change_to_file?
+ update_project_statistics project_statistics_name: :build_artifacts_size
+
scope :with_files_stored_locally, -> { where(file_store: [nil, ::JobArtifactUploader::Store::LOCAL]) }
scope :with_files_stored_remotely, -> { where(file_store: ::JobArtifactUploader::Store::REMOTE) }
scope :for_sha, ->(sha, project_id) { joins(job: :pipeline).where(ci_pipelines: { sha: sha, project_id: project_id }) }
@@ -226,6 +228,15 @@ module Ci
self.size = file.size
end
+ def set_file_store
+ self.file_store =
+ if JobArtifactUploader.object_store_enabled? && JobArtifactUploader.direct_upload_enabled?
+ JobArtifactUploader::Store::REMOTE
+ else
+ file.object_store
+ end
+ end
+
def project_destroyed?
# Use job.project to avoid extra DB query for project
job.project.pending_delete?
diff --git a/app/models/ci/pipeline.rb b/app/models/ci/pipeline.rb
index 3ce44a066ae..8a3ca2e758c 100644
--- a/app/models/ci/pipeline.rb
+++ b/app/models/ci/pipeline.rb
@@ -736,6 +736,7 @@ module Ci
MergeRequest.where(id: merge_request_id)
else
MergeRequest.where(source_project_id: project_id, source_branch: ref)
+ .by_commit_sha(sha)
end
end
diff --git a/app/models/deploy_token.rb b/app/models/deploy_token.rb
index a9844f627b7..69245710f01 100644
--- a/app/models/deploy_token.rb
+++ b/app/models/deploy_token.rb
@@ -7,7 +7,7 @@ class DeployToken < ApplicationRecord
include Gitlab::Utils::StrongMemoize
add_authentication_token_field :token, encrypted: :optional
- AVAILABLE_SCOPES = %i(read_repository read_registry).freeze
+ AVAILABLE_SCOPES = %i(read_repository read_registry write_registry).freeze
GITLAB_DEPLOY_TOKEN_NAME = 'gitlab-deploy-token'
default_value_for(:expires_at) { Forever.date }
@@ -105,7 +105,7 @@ class DeployToken < ApplicationRecord
end
def ensure_at_least_one_scope
- errors.add(:base, _("Scopes can't be blank")) unless read_repository || read_registry
+ errors.add(:base, _("Scopes can't be blank")) unless read_repository || read_registry || write_registry
end
def default_username
diff --git a/app/models/diff_note_position.rb b/app/models/diff_note_position.rb
new file mode 100644
index 00000000000..78e4fbc49eb
--- /dev/null
+++ b/app/models/diff_note_position.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+class DiffNotePosition < ApplicationRecord
+ belongs_to :note
+
+ enum diff_content_type: {
+ text: 0,
+ image: 1
+ }
+
+ enum diff_type: {
+ head: 0
+ }
+
+ def position
+ Gitlab::Diff::Position.new(
+ old_path: old_path,
+ new_path: new_path,
+ old_line: old_line,
+ new_line: new_line,
+ position_type: diff_content_type,
+ diff_refs: Gitlab::Diff::DiffRefs.new(
+ base_sha: base_sha,
+ start_sha: start_sha,
+ head_sha: head_sha
+ )
+ )
+ end
+
+ def position=(position)
+ position_attrs = position.to_h
+ position_attrs[:diff_content_type] = position_attrs.delete(:position_type)
+
+ assign_attributes(position_attrs)
+ end
+end
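DiffNotePosition above maps Gitlab::Diff::Position attributes onto its own columns, renaming position_type to diff_content_type on the way in and rebuilding the position object on the way out. A tiny standalone Ruby illustration of that key rename, using plain hashes only (no GitLab classes involved):

# Mirrors the rename performed in #position= above.
position_attrs = { old_path: 'a.rb', new_path: 'a.rb', new_line: 3, position_type: 'text' }
position_attrs[:diff_content_type] = position_attrs.delete(:position_type)
# position_attrs now carries :diff_content_type instead of :position_type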
diff --git a/app/models/group.rb b/app/models/group.rb
index 203ed1694b7..f4eaa581d54 100644
--- a/app/models/group.rb
+++ b/app/models/group.rb
@@ -475,6 +475,16 @@ class Group < Namespace
false
end
+ def wiki_access_level
+ # TODO: Remove this method once we implement group-level features.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/208412
+ if Feature.enabled?(:group_wiki, self)
+ ProjectFeature::ENABLED
+ else
+ ProjectFeature::DISABLED
+ end
+ end
+
private
def update_two_factor_requirement
diff --git a/app/models/jira_import_state.rb b/app/models/jira_import_state.rb
index ec1b8f03d36..543ee77917c 100644
--- a/app/models/jira_import_state.rb
+++ b/app/models/jira_import_state.rb
@@ -12,6 +12,8 @@ class JiraImportState < ApplicationRecord
belongs_to :user
belongs_to :label
+ scope :by_jira_project_key, -> (jira_project_key) { where(jira_project_key: jira_project_key) }
+
validates :project, presence: true
validates :jira_project_key, presence: true
validates :jira_project_name, presence: true
diff --git a/app/models/lfs_object.rb b/app/models/lfs_object.rb
index 6a86aebae39..c5233deaa96 100644
--- a/app/models/lfs_object.rb
+++ b/app/models/lfs_object.rb
@@ -17,6 +17,8 @@ class LfsObject < ApplicationRecord
mount_uploader :file, LfsObjectUploader
+ before_save :set_file_store, if: ->(lfs_object) { lfs_object.file_store.nil? }
+
after_save :update_file_store, if: :saved_change_to_file?
def self.not_linked_to_project(project)
@@ -55,6 +57,17 @@ class LfsObject < ApplicationRecord
def self.calculate_oid(path)
self.hexdigest(path)
end
+
+ private
+
+ def set_file_store
+ self.file_store =
+ if LfsObjectUploader.object_store_enabled? && LfsObjectUploader.direct_upload_enabled?
+ LfsObjectUploader::Store::REMOTE
+ else
+ file.object_store
+ end
+ end
end
LfsObject.prepend_if_ee('EE::LfsObject')
diff --git a/app/models/namespace.rb b/app/models/namespace.rb
index 260ba9ea4a5..9e7589a1f18 100644
--- a/app/models/namespace.rb
+++ b/app/models/namespace.rb
@@ -11,6 +11,9 @@ class Namespace < ApplicationRecord
include FeatureGate
include FromUnion
include Gitlab::Utils::StrongMemoize
+ include IgnorableColumns
+
+ ignore_column :plan_id, remove_with: '13.1', remove_after: '2020-06-22'
# Prevent users from creating unreasonably deep level of nesting.
# The number 20 was taken based on maximum nesting level of
diff --git a/app/models/pages_domain.rb b/app/models/pages_domain.rb
index 37d45c5934d..486da2c6b45 100644
--- a/app/models/pages_domain.rb
+++ b/app/models/pages_domain.rb
@@ -1,6 +1,8 @@
# frozen_string_literal: true
class PagesDomain < ApplicationRecord
+ include Presentable
+
VERIFICATION_KEY = 'gitlab-pages-verification-code'
VERIFICATION_THRESHOLD = 3.days.freeze
SSL_RENEWAL_THRESHOLD = 30.days.freeze
@@ -13,6 +15,8 @@ class PagesDomain < ApplicationRecord
has_many :acme_orders, class_name: "PagesDomainAcmeOrder"
has_many :serverless_domain_clusters, class_name: 'Serverless::DomainCluster', inverse_of: :pages_domain
+ before_validation :clear_auto_ssl_failure, unless: :auto_ssl_enabled
+
validates :domain, hostname: { allow_numeric_hostname: true }
validates :domain, uniqueness: { case_sensitive: false }
validates :certificate, :key, presence: true, if: :usage_serverless?
@@ -208,6 +212,10 @@ class PagesDomain < ApplicationRecord
Pages::VirtualDomain.new([project], domain: self)
end
+ def clear_auto_ssl_failure
+ self.auto_ssl_failed = false
+ end
+
private
def pages_deployed?
diff --git a/app/models/project.rb b/app/models/project.rb
index ee4cc6157eb..443b44dd023 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -1190,14 +1190,14 @@ class Project < ApplicationRecord
end
def external_issue_tracker
- if has_external_issue_tracker.nil? # To populate existing projects
+ if has_external_issue_tracker.nil?
cache_has_external_issue_tracker
end
if has_external_issue_tracker?
- return @external_issue_tracker if defined?(@external_issue_tracker)
-
- @external_issue_tracker = services.external_issue_trackers.first
+ strong_memoize(:external_issue_tracker) do
+ services.external_issue_trackers.first
+ end
else
nil
end
@@ -1217,7 +1217,7 @@ class Project < ApplicationRecord
def external_wiki
if has_external_wiki.nil?
- cache_has_external_wiki # Populate
+ cache_has_external_wiki
end
if has_external_wiki
diff --git a/app/models/project_services/chat_message/pipeline_message.rb b/app/models/project_services/chat_message/pipeline_message.rb
index 52a26f6211a..50b982a803f 100644
--- a/app/models/project_services/chat_message/pipeline_message.rb
+++ b/app/models/project_services/chat_message/pipeline_message.rb
@@ -34,7 +34,9 @@ module ChatMessage
@duration = pipeline_attributes[:duration].to_i
@finished_at = pipeline_attributes[:finished_at] ? Time.parse(pipeline_attributes[:finished_at]).to_i : nil
@pipeline_id = pipeline_attributes[:id]
- @failed_jobs = Array(data[:builds]).select { |b| b[:status] == 'failed' }.reverse # Show failed jobs from oldest to newest
+
+ # Get list of jobs that have actually failed (after exhausting all retries)
+ @failed_jobs = actually_failed_jobs(Array(data[:builds]))
@failed_stages = @failed_jobs.map { |j| j[:stage] }.uniq
@project = Project.find(data[:project][:id])
@@ -90,6 +92,17 @@ module ChatMessage
private
+ def actually_failed_jobs(builds)
+ succeeded_job_names = builds.map { |b| b[:name] if b[:status] == 'success' }.compact.uniq
+
+ failed_jobs = builds.select do |build|
+ # Select jobs which don't have a successful retry
+ build[:status] == 'failed' && !succeeded_job_names.include?(build[:name])
+ end
+
+ failed_jobs.uniq { |job| job[:name] }.reverse
+ end
+
def fancy_notifications?
Feature.enabled?(:fancy_pipeline_slack_notifications, default_enabled: true)
end
diff --git a/app/models/project_services/prometheus_service.rb b/app/models/project_services/prometheus_service.rb
index 30dfcc11417..1a85289a04f 100644
--- a/app/models/project_services/prometheus_service.rb
+++ b/app/models/project_services/prometheus_service.rb
@@ -153,6 +153,6 @@ class PrometheusService < MonitoringService
def create_default_alerts
return unless project_id
- Prometheus::CreateDefaultAlertsWorker.perform_async(project_id: project_id)
+ Prometheus::CreateDefaultAlertsWorker.perform_async(project_id)
end
end
diff --git a/app/policies/project_policy/class_methods.rb b/app/policies/concerns/crud_policy_helpers.rb
index 42d993406a9..d8521ca22cc 100644
--- a/app/policies/project_policy/class_methods.rb
+++ b/app/policies/concerns/crud_policy_helpers.rb
@@ -1,7 +1,9 @@
# frozen_string_literal: true
-class ProjectPolicy
- module ClassMethods
+module CrudPolicyHelpers
+ extend ActiveSupport::Concern
+
+ class_methods do
def create_read_update_admin_destroy(name)
[
:"read_#{name}",
diff --git a/app/policies/group_policy.rb b/app/policies/group_policy.rb
index 5e252c8e564..a34217d90dd 100644
--- a/app/policies/group_policy.rb
+++ b/app/policies/group_policy.rb
@@ -1,6 +1,7 @@
# frozen_string_literal: true
class GroupPolicy < BasePolicy
+ include CrudPolicyHelpers
include FindGroupProjects
desc "Group is public"
@@ -42,15 +43,23 @@ class GroupPolicy < BasePolicy
@subject.subgroup_creation_level == ::Gitlab::Access::MAINTAINER_SUBGROUP_ACCESS
end
+ desc "Group has wiki disabled"
+ condition(:wiki_disabled, score: 32) { !feature_available?(:wiki) }
+
rule { public_group }.policy do
enable :read_group
enable :read_package
+ enable :read_wiki
end
- rule { logged_in_viewable }.enable :read_group
+ rule { logged_in_viewable }.policy do
+ enable :read_group
+ enable :read_wiki
+ end
rule { guest }.policy do
enable :read_group
+ enable :read_wiki
enable :upload_file
end
@@ -78,10 +87,12 @@ class GroupPolicy < BasePolicy
enable :create_metrics_dashboard_annotation
enable :delete_metrics_dashboard_annotation
enable :update_metrics_dashboard_annotation
+ enable :create_wiki
end
rule { reporter }.policy do
enable :read_container_image
+ enable :download_wiki_code
enable :admin_label
enable :admin_list
enable :admin_issue
@@ -100,6 +111,7 @@ class GroupPolicy < BasePolicy
enable :destroy_deploy_token
enable :read_deploy_token
enable :create_deploy_token
+ enable :admin_wiki
end
rule { owner }.policy do
@@ -145,6 +157,11 @@ class GroupPolicy < BasePolicy
rule { maintainer & can?(:create_projects) }.enable :transfer_projects
+ rule { wiki_disabled }.policy do
+ prevent(*create_read_update_admin_destroy(:wiki))
+ prevent(:download_wiki_code)
+ end
+
def access_level
return GroupMember::NO_ACCESS if @user.nil?
@@ -154,6 +171,21 @@ class GroupPolicy < BasePolicy
def lookup_access_level!
@subject.max_member_access_for_user(@user)
end
+
+ # TODO: Extract this into a helper shared with ProjectPolicy, once we implement group-level features.
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/208412
+ def feature_available?(feature)
+ return false unless feature == :wiki
+
+ case @subject.wiki_access_level
+ when ProjectFeature::DISABLED
+ false
+ when ProjectFeature::PRIVATE
+ admin? || access_level >= ProjectFeature.required_minimum_access_level(feature)
+ else
+ true
+ end
+ end
end
GroupPolicy.prepend_if_ee('EE::GroupPolicy')
diff --git a/app/policies/issue_policy.rb b/app/policies/issue_policy.rb
index f86892227df..20df823c737 100644
--- a/app/policies/issue_policy.rb
+++ b/app/policies/issue_policy.rb
@@ -5,7 +5,7 @@ class IssuePolicy < IssuablePolicy
# Make sure to sync this class checks with issue.rb to avoid security problems.
# Check commit 002ad215818450d2cbbc5fa065850a953dc7ada8 for more information.
- extend ProjectPolicy::ClassMethods
+ include CrudPolicyHelpers
desc "User can read confidential issues"
condition(:can_read_confidential) do
diff --git a/app/policies/project_policy.rb b/app/policies/project_policy.rb
index 0f5e4ac378e..7454343a357 100644
--- a/app/policies/project_policy.rb
+++ b/app/policies/project_policy.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
class ProjectPolicy < BasePolicy
- extend ClassMethods
+ include CrudPolicyHelpers
READONLY_FEATURES_WHEN_ARCHIVED = %i[
issue
diff --git a/app/services/auth/container_registry_authentication_service.rb b/app/services/auth/container_registry_authentication_service.rb
index 629c1cbdc5c..4a699fe3213 100644
--- a/app/services/auth/container_registry_authentication_service.rb
+++ b/app/services/auth/container_registry_authentication_service.rb
@@ -135,7 +135,7 @@ module Auth
when 'pull'
build_can_pull?(requested_project) || user_can_pull?(requested_project) || deploy_token_can_pull?(requested_project)
when 'push'
- build_can_push?(requested_project) || user_can_push?(requested_project)
+ build_can_push?(requested_project) || user_can_push?(requested_project) || deploy_token_can_push?(requested_project)
when 'delete'
build_can_delete?(requested_project) || user_can_admin?(requested_project)
when '*'
@@ -185,6 +185,13 @@ module Auth
current_user.read_registry?
end
+ def deploy_token_can_push?(requested_project)
+ has_authentication_ability?(:create_container_image) &&
+ current_user.is_a?(DeployToken) &&
+ current_user.has_access_to?(requested_project) &&
+ current_user.write_registry?
+ end
+
##
# We still support legacy pipeline triggers which do not have associated
# actor. New permissions model and new triggers are always associated with
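The new deploy_token_can_push? branch mirrors the existing pull checks: a deploy token may push images only when the requested abilities include :create_container_image, the token has access to the project, and it carries the new write_registry scope. A self-contained Ruby restatement of that predicate follows; the stub class is invented for the example, and only the three checks themselves come from the hunk above:

# Stub standing in for a DeployToken; only the predicates used above are modelled.
DeployTokenStub = Struct.new(:project_path, :write_registry, keyword_init: true) do
  def has_access_to?(path)
    path == project_path
  end

  def write_registry?
    write_registry
  end
end

def deploy_token_can_push?(token, project_path, authentication_abilities)
  authentication_abilities.include?(:create_container_image) &&
    token.has_access_to?(project_path) &&
    token.write_registry?
end

token = DeployTokenStub.new(project_path: 'group/app', write_registry: true)
deploy_token_can_push?(token, 'group/app', [:create_container_image]) # => true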
diff --git a/app/services/clusters/create_service.rb b/app/services/clusters/create_service.rb
index 5c26c611e00..7b5bf6b32c2 100644
--- a/app/services/clusters/create_service.rb
+++ b/app/services/clusters/create_service.rb
@@ -23,6 +23,8 @@ module Clusters
cluster.errors.add(:base, _('Instance does not support multiple Kubernetes clusters'))
end
+ validate_management_project_permissions(cluster)
+
return cluster if cluster.errors.present?
cluster.tap do |cluster|
@@ -57,6 +59,11 @@ module Clusters
def can_create_cluster?
clusterable.clusters.empty?
end
+
+ def validate_management_project_permissions(cluster)
+ Clusters::Management::ValidateManagementProjectPermissionsService.new(current_user)
+ .execute(cluster, params[:management_project_id])
+ end
end
end
diff --git a/app/services/clusters/management/validate_management_project_permissions_service.rb b/app/services/clusters/management/validate_management_project_permissions_service.rb
new file mode 100644
index 00000000000..e89a0afe6d2
--- /dev/null
+++ b/app/services/clusters/management/validate_management_project_permissions_service.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+module Clusters
+ module Management
+ class ValidateManagementProjectPermissionsService
+ attr_reader :current_user
+
+ def initialize(user = nil)
+ @current_user = user
+ end
+
+ def execute(cluster, management_project_id)
+ if management_project_id.present?
+ management_project = management_project_scope(cluster).find_by_id(management_project_id)
+
+ unless management_project && can_admin_pipeline_for_project?(management_project)
+ cluster.errors.add(:management_project_id, _('Project does not exist or you don\'t have permission to perform this action'))
+
+ return false
+ end
+ end
+
+ true
+ end
+
+ private
+
+ def can_admin_pipeline_for_project?(project)
+ Ability.allowed?(current_user, :admin_pipeline, project)
+ end
+
+ def management_project_scope(cluster)
+ return ::Project.all if cluster.instance_type?
+
+ group =
+ if cluster.group_type?
+ cluster.first_group
+ elsif cluster.project_type?
+ cluster.first_project&.namespace
+ end
+
+ # Prevent users from selecting nested projects until
+ # https://gitlab.com/gitlab-org/gitlab/issues/34650 is resolved
+ include_subgroups = cluster.group_type?
+
+ ::GroupProjectsFinder.new(
+ group: group,
+ current_user: current_user,
+ options: { only_owned: true, include_subgroups: include_subgroups }
+ ).execute
+ end
+ end
+ end
+end
diff --git a/app/services/clusters/update_service.rb b/app/services/clusters/update_service.rb
index 8cb77040b14..2315df612a1 100644
--- a/app/services/clusters/update_service.rb
+++ b/app/services/clusters/update_service.rb
@@ -18,46 +18,9 @@ module Clusters
private
- def can_admin_pipeline_for_project?(project)
- Ability.allowed?(current_user, :admin_pipeline, project)
- end
-
def validate_params(cluster)
- if params[:management_project_id].present?
- management_project = management_project_scope(cluster).find_by_id(params[:management_project_id])
-
- unless management_project
- cluster.errors.add(:management_project_id, _('Project does not exist or you don\'t have permission to perform this action'))
-
- return false
- end
-
- unless can_admin_pipeline_for_project?(management_project)
- # Use same message as not found to prevent enumeration
- cluster.errors.add(:management_project_id, _('Project does not exist or you don\'t have permission to perform this action'))
-
- return false
- end
- end
-
- true
- end
-
- def management_project_scope(cluster)
- return ::Project.all if cluster.instance_type?
-
- group =
- if cluster.group_type?
- cluster.first_group
- elsif cluster.project_type?
- cluster.first_project&.namespace
- end
-
- # Prevent users from selecting nested projects until
- # https://gitlab.com/gitlab-org/gitlab/issues/34650 is resolved
- include_subgroups = cluster.group_type?
-
- ::GroupProjectsFinder.new(group: group, current_user: current_user, options: { only_owned: true, include_subgroups: include_subgroups }).execute
+ ::Clusters::Management::ValidateManagementProjectPermissionsService.new(current_user)
+ .execute(cluster, params[:management_project_id])
end
end
end
diff --git a/app/services/environments/auto_stop_service.rb b/app/services/environments/auto_stop_service.rb
index ee7f25a4d76..bde598abf66 100644
--- a/app/services/environments/auto_stop_service.rb
+++ b/app/services/environments/auto_stop_service.rb
@@ -30,7 +30,7 @@ module Environments
def stop_in_batch
environments = Environment.auto_stoppable(BATCH_SIZE)
- return false unless environments.exists? && Feature.enabled?(:auto_stop_environments, default_enabled: true)
+ return false unless environments.exists?
Ci::StopEnvironmentsService.execute_in_batch(environments)
end
diff --git a/app/services/jira_import/start_import_service.rb b/app/services/jira_import/start_import_service.rb
index 134cef089e7..e8d9e6734bd 100644
--- a/app/services/jira_import/start_import_service.rb
+++ b/app/services/jira_import/start_import_service.rb
@@ -33,8 +33,10 @@ module JiraImport
end
def build_jira_import
+ label = create_import_label(project)
project.jira_imports.build(
user: user,
+ label: label,
jira_project_key: jira_project_key,
# we do not have the jira_project_name or jira_project_xid yet so just set a mock value,
# we will once https://gitlab.com/gitlab-org/gitlab/-/merge_requests/28190
@@ -43,9 +45,26 @@ module JiraImport
)
end
+ def create_import_label(project)
+ label = ::Labels::CreateService.new(build_label_attrs(project)).execute(project: project)
+ raise Projects::ImportService::Error, _('Failed to create import label for jira import.') if label.blank?
+
+ label
+ end
+
+ def build_label_attrs(project)
+ import_start_time = Time.zone.now
+ jira_imports_for_project = project.jira_imports.by_jira_project_key(jira_project_key).size + 1
+ title = "jira-import::#{jira_project_key}-#{jira_imports_for_project}"
+ description = "Label for issues that were imported from jira on #{import_start_time.strftime('%Y-%m-%d %H:%M:%S')}"
+ color = "#{Label.color_for(title)}"
+ { title: title, description: description, color: color }
+ end
+
def validate
return build_error_response(_('Jira import feature is disabled.')) unless project.jira_issues_import_feature_flag_enabled?
return build_error_response(_('You do not have permissions to run the import.')) unless user.can?(:admin_project, project)
+ return build_error_response(_('Cannot import because issues are not available in this project.')) unless project.feature_available?(:issues, user)
return build_error_response(_('Jira integration not configured.')) unless project.jira_service&.active?
return build_error_response(_('Unable to find Jira project to import data from.')) if jira_project_key.blank?
return build_error_response(_('Jira import is already running.')) if import_in_progress?
diff --git a/app/services/metrics/dashboard/base_service.rb b/app/services/metrics/dashboard/base_service.rb
index 219b26defb1..c112d75a9b5 100644
--- a/app/services/metrics/dashboard/base_service.rb
+++ b/app/services/metrics/dashboard/base_service.rb
@@ -12,7 +12,8 @@ module Metrics
STAGES::CommonMetricsInserter,
STAGES::EndpointInserter,
STAGES::PanelIdsInserter,
- STAGES::Sorter
+ STAGES::Sorter,
+ STAGES::AlertsInserter
].freeze
def get_dashboard
@@ -117,5 +118,3 @@ module Metrics
end
end
end
-
-Metrics::Dashboard::BaseService.prepend_if_ee('EE::Metrics::Dashboard::BaseService')
diff --git a/app/services/metrics/dashboard/system_dashboard_service.rb b/app/services/metrics/dashboard/system_dashboard_service.rb
index c28b7b875df..ed4b78ba159 100644
--- a/app/services/metrics/dashboard/system_dashboard_service.rb
+++ b/app/services/metrics/dashboard/system_dashboard_service.rb
@@ -14,7 +14,8 @@ module Metrics
STAGES::CustomMetricsDetailsInserter,
STAGES::EndpointInserter,
STAGES::PanelIdsInserter,
- STAGES::Sorter
+ STAGES::Sorter,
+ STAGES::AlertsInserter
].freeze
class << self
@@ -30,5 +31,3 @@ module Metrics
end
end
end
-
-Metrics::Dashboard::SystemDashboardService.prepend_if_ee('EE::Metrics::Dashboard::SystemDashboardService')
diff --git a/app/services/notification_service.rb b/app/services/notification_service.rb
index 62827f20929..91e19d190bd 100644
--- a/app/services/notification_service.rb
+++ b/app/services/notification_service.rb
@@ -489,6 +489,12 @@ class NotificationService
end
end
+ def pages_domain_auto_ssl_failed(domain)
+ project_maintainers_recipients(domain, action: 'disabled').each do |recipient|
+ mailer.pages_domain_auto_ssl_failed_email(domain, recipient.user).deliver_later
+ end
+ end
+
def issue_due(issue)
recipients = NotificationRecipients::BuildService.build_recipients(
issue,
diff --git a/app/services/pages_domains/obtain_lets_encrypt_certificate_service.rb b/app/services/pages_domains/obtain_lets_encrypt_certificate_service.rb
index 93445dd4ddd..1c03641469e 100644
--- a/app/services/pages_domains/obtain_lets_encrypt_certificate_service.rb
+++ b/app/services/pages_domains/obtain_lets_encrypt_certificate_service.rb
@@ -57,6 +57,8 @@ module PagesDomains
pages_domain.save!(validate: false)
acme_order.destroy!
+
+ NotificationService.new.pages_domain_auto_ssl_failed(pages_domain)
end
def log_error(api_order)
diff --git a/app/services/pages_domains/retry_acme_order_service.rb b/app/services/pages_domains/retry_acme_order_service.rb
new file mode 100644
index 00000000000..ef3d8ce0b67
--- /dev/null
+++ b/app/services/pages_domains/retry_acme_order_service.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+module PagesDomains
+ class RetryAcmeOrderService
+ attr_reader :pages_domain
+
+ def initialize(pages_domain)
+ @pages_domain = pages_domain
+ end
+
+ def execute
+ updated = pages_domain.with_lock do
+ next unless pages_domain.auto_ssl_enabled && pages_domain.auto_ssl_failed
+
+ pages_domain.update!(auto_ssl_failed: false)
+ end
+
+ PagesDomainSslRenewalWorker.perform_async(pages_domain.id) if updated
+ end
+ end
+end
diff --git a/app/services/projects/update_repository_storage_service.rb b/app/services/projects/update_repository_storage_service.rb
index 0602089a3ab..2e5de9411d1 100644
--- a/app/services/projects/update_repository_storage_service.rb
+++ b/app/services/projects/update_repository_storage_service.rb
@@ -5,12 +5,15 @@ module Projects
include Gitlab::ShellAdapter
Error = Class.new(StandardError)
+ SameFilesystemError = Class.new(Error)
def initialize(project)
@project = project
end
def execute(new_repository_storage_key)
+ raise SameFilesystemError if same_filesystem?(project.repository.storage, new_repository_storage_key)
+
mirror_repositories(new_repository_storage_key)
mark_old_paths_for_archive
@@ -33,6 +36,10 @@ module Projects
private
+ def same_filesystem?(old_storage, new_storage)
+ Gitlab::GitalyClient.filesystem_id(old_storage) == Gitlab::GitalyClient.filesystem_id(new_storage)
+ end
+
def mirror_repositories(new_repository_storage_key)
mirror_repository(new_repository_storage_key)
diff --git a/app/services/prometheus/create_default_alerts_service.rb b/app/services/prometheus/create_default_alerts_service.rb
index 3eb5ad7711a..c87cbbbe3cf 100644
--- a/app/services/prometheus/create_default_alerts_service.rb
+++ b/app/services/prometheus/create_default_alerts_service.rb
@@ -16,6 +16,11 @@ module Prometheus
identifier: 'response_metrics_nginx_ingress_http_error_rate',
operator: 'gt',
threshold: 0.1
+ },
+ {
+ identifier: 'response_metrics_nginx_http_error_percentage',
+ operator: 'gt',
+ threshold: 0.1
}
].freeze
diff --git a/app/uploaders/records_uploads.rb b/app/uploaders/records_uploads.rb
index 967fcdc704e..427314a87bb 100644
--- a/app/uploaders/records_uploads.rb
+++ b/app/uploaders/records_uploads.rb
@@ -56,10 +56,31 @@ module RecordsUploads
size: file.size,
path: upload_path,
model: model,
- mount_point: mounted_as
+ mount_point: mounted_as,
+ store: initial_store
)
end
+ def initial_store
+ if immediately_remote_stored?
+ ::ObjectStorage::Store::REMOTE
+ else
+ ::ObjectStorage::Store::LOCAL
+ end
+ end
+
+ def immediately_remote_stored?
+ object_storage_available? && direct_upload_enabled?
+ end
+
+ def object_storage_available?
+ self.class.ancestors.include?(ObjectStorage::Concern)
+ end
+
+ def direct_upload_enabled?
+ self.class.object_store_enabled? && self.class.direct_upload_enabled?
+ end
+
# Before removing an attachment, destroy any Upload records at the same path
#
# Called `before :remove`
diff --git a/app/views/admin/application_settings/_issue_limits.html.haml b/app/views/admin/application_settings/_issue_limits.html.haml
new file mode 100644
index 00000000000..5906358fbb1
--- /dev/null
+++ b/app/views/admin/application_settings/_issue_limits.html.haml
@@ -0,0 +1,9 @@
+= form_for @application_setting, url: network_admin_application_settings_path(anchor: 'js-issue-limits-settings'), html: { class: 'fieldset-form' } do |f|
+ = form_errors(@application_setting)
+
+ %fieldset
+ .form-group
+ = f.label :issues_create_limit, 'Max requests per minute per user', class: 'label-bold'
+ = f.number_field :issues_create_limit, class: 'form-control'
+
+ = f.submit 'Save changes', class: "btn btn-success", data: { qa_selector: 'save_changes_button' }
diff --git a/app/views/admin/application_settings/network.html.haml b/app/views/admin/application_settings/network.html.haml
index 8d88dedf832..db4611964b4 100644
--- a/app/views/admin/application_settings/network.html.haml
+++ b/app/views/admin/application_settings/network.html.haml
@@ -46,4 +46,15 @@
.settings-content
= render 'protected_paths'
+%section.settings.as-issue-limits.no-animate#js-issue-limits-settings{ class: ('expanded' if expanded_by_default?) }
+ .settings-header
+ %h4
+ = _('Issues Rate Limits')
+ %button.btn.btn-default.js-settings-toggle{ type: 'button' }
+ = expanded_by_default? ? _('Collapse') : _('Expand')
+ %p
+ = _('Configure limit for issues created per minute by web and API requests.')
+ .settings-content
+ = render 'issue_limits'
+
= render_if_exists 'admin/application_settings/ee_network_settings'
diff --git a/app/views/admin/deploy_keys/index.html.haml b/app/views/admin/deploy_keys/index.html.haml
index 9fffa97f969..4e9cfc13af0 100644
--- a/app/views/admin/deploy_keys/index.html.haml
+++ b/app/views/admin/deploy_keys/index.html.haml
@@ -1,7 +1,7 @@
- page_title _('Deploy Keys')
%h3.page-title.deploy-keys-title
- = _('Public deploy keys (%{deploy_keys_count})') % { deploy_keys_count: @deploy_keys.count }
+ = _('Public deploy keys (%{deploy_keys_count})') % { deploy_keys_count: @deploy_keys.load.size }
.float-right
= link_to _('New deploy key'), new_admin_deploy_key_path, class: 'btn btn-success btn-sm btn-inverted'
diff --git a/app/views/layouts/_page.html.haml b/app/views/layouts/_page.html.haml
index 16089a00386..06e3bca99a1 100644
--- a/app/views/layouts/_page.html.haml
+++ b/app/views/layouts/_page.html.haml
@@ -5,7 +5,7 @@
.mobile-overlay
.alert-wrapper
= render 'shared/outdated_browser'
- = render_if_exists "layouts/header/ee_subscribable_banner"
+ = render_if_exists "layouts/header/ee_license_banner"
= render "layouts/broadcast"
= render "layouts/header/read_only_banner"
= render "layouts/nav/classification_level_banner"
diff --git a/app/views/layouts/nav/_dashboard.html.haml b/app/views/layouts/nav/_dashboard.html.haml
index 6fc06030d7a..0b23a06f5a9 100644
--- a/app/views/layouts/nav/_dashboard.html.haml
+++ b/app/views/layouts/nav/_dashboard.html.haml
@@ -26,7 +26,7 @@
%ul
- if dashboard_nav_link?(:groups)
%li.d-md-none
- = link_to dashboard_groups_path do
+ = link_to dashboard_groups_path, class: 'dashboard-shortcuts-groups' do
= _('Groups')
- if dashboard_nav_link?(:activity)
= nav_link(path: 'dashboard#activity') do
diff --git a/app/views/notify/pages_domain_auto_ssl_failed_email.html.haml b/app/views/notify/pages_domain_auto_ssl_failed_email.html.haml
new file mode 100644
index 00000000000..1bc2cc15616
--- /dev/null
+++ b/app/views/notify/pages_domain_auto_ssl_failed_email.html.haml
@@ -0,0 +1,11 @@
+%p
+ = _("Something went wrong while obtaining the Let's Encrypt certificate.")
+%p
+ #{_('Project')}: #{link_to @project.human_name, project_url(@project)}
+%p
+ #{_('Domain')}: #{link_to @domain.domain, project_pages_domain_url(@project, @domain)}
+%p
+ - docs_url = help_page_url('user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md', anchor: 'troubleshooting')
+ - link_start = '<a href="%{url}" target="_blank" rel="noopener noreferrer">'.html_safe % { url: docs_url }
+ - link_end = '</a>'.html_safe
+ = _("Please follow the %{link_start}Let\'s Encrypt troubleshooting instructions%{link_end} to re-obtain your Let's Encrypt certificate.").html_safe % { link_start: link_start, link_end: link_end }
diff --git a/app/views/notify/pages_domain_auto_ssl_failed_email.text.haml b/app/views/notify/pages_domain_auto_ssl_failed_email.text.haml
new file mode 100644
index 00000000000..6f20d11c966
--- /dev/null
+++ b/app/views/notify/pages_domain_auto_ssl_failed_email.text.haml
@@ -0,0 +1,7 @@
+= _("Something went wrong while obtaining the Let's Encrypt certificate.").html_safe
+
+#{_('Project')}: #{project_url(@project)}
+#{_('Domain')}: #{project_pages_domain_url(@project, @domain)}
+
+- docs_url = help_page_url('user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md', anchor: 'troubleshooting')
+= _("Please follow the Let\'s Encrypt troubleshooting instructions to re-obtain your Let's Encrypt certificate: %{docs_url}.").html_safe % { docs_url: docs_url }
diff --git a/app/views/profiles/emails/index.html.haml b/app/views/profiles/emails/index.html.haml
index 6ea4eeb66c5..e28c74dd650 100644
--- a/app/views/profiles/emails/index.html.haml
+++ b/app/views/profiles/emails/index.html.haml
@@ -18,7 +18,7 @@
= f.submit _('Add email address'), class: 'btn btn-success', data: { qa_selector: 'add_email_address_button' }
%hr
%h4.prepend-top-0
- = _('Linked emails (%{email_count})') % { email_count: @emails.count + 1 }
+ = _('Linked emails (%{email_count})') % { email_count: @emails.load.size + 1 }
.account-well.append-bottom-default
%ul
%li
diff --git a/app/views/projects/_flash_messages.html.haml b/app/views/projects/_flash_messages.html.haml
index 52bfc4f1b14..f9222387e97 100644
--- a/app/views/projects/_flash_messages.html.haml
+++ b/app/views/projects/_flash_messages.html.haml
@@ -8,5 +8,4 @@
- unless project.empty_repo?
= render 'shared/auto_devops_implicitly_enabled_banner', project: project
= render_if_exists 'projects/above_size_limit_warning', project: project
- = render_if_exists "layouts/header/ee_subscribable_banner", subscription: true
= render_if_exists 'shared/shared_runners_minutes_limit', project: project, classes: [container_class, ("limit-container-width" unless fluid_layout)]
diff --git a/app/views/projects/issues/_related_branches.html.haml b/app/views/projects/issues/_related_branches.html.haml
index 6da4956a036..69b030ed76a 100644
--- a/app/views/projects/issues/_related_branches.html.haml
+++ b/app/views/projects/issues/_related_branches.html.haml
@@ -1,6 +1,6 @@
- if @related_branches.any?
%h2.related-branches-title
- = pluralize(@related_branches.count, 'Related Branch')
+ = pluralize(@related_branches.size, 'Related Branch')
%ul.unstyled-list.related-merge-requests
- @related_branches.each do |branch|
%li
diff --git a/app/views/projects/issues/show.html.haml b/app/views/projects/issues/show.html.haml
index 1f499dbd0a2..4fc67884584 100644
--- a/app/views/projects/issues/show.html.haml
+++ b/app/views/projects/issues/show.html.haml
@@ -87,8 +87,7 @@
.col-md-12.col-lg-6.js-noteable-awards
= render 'award_emoji/awards_block', awardable: @issue, inline: true
.col-md-12.col-lg-6.new-branch-col
- - if Feature.enabled?(:sort_discussions, @project)
- #js-vue-sort-issue-discussions
+ #js-vue-sort-issue-discussions
#js-vue-discussion-filter{ data: { default_filter: current_user&.notes_filter_for(@issue), notes_filters: UserPreference.notes_filters.to_json } }
= render 'new_branch' if show_new_branch_button?
diff --git a/app/views/projects/merge_requests/_awards_block.html.haml b/app/views/projects/merge_requests/_awards_block.html.haml
index c1e92e22590..e4a7b9b7e62 100644
--- a/app/views/projects/merge_requests/_awards_block.html.haml
+++ b/app/views/projects/merge_requests/_awards_block.html.haml
@@ -2,6 +2,5 @@
= render 'award_emoji/awards_block', awardable: @merge_request, inline: true do
- if mr_tabs_position_enabled?
.ml-auto.mt-auto.mb-auto
- - if Feature.enabled?(:sort_discussions, @merge_request.target_project)
- #js-vue-sort-issue-discussions
+ #js-vue-sort-issue-discussions
= render "projects/merge_requests/discussion_filter"
diff --git a/app/views/projects/pages/_list.html.haml b/app/views/projects/pages/_list.html.haml
index 0d40f375926..c116efe521a 100644
--- a/app/views/projects/pages/_list.html.haml
+++ b/app/views/projects/pages/_list.html.haml
@@ -3,10 +3,9 @@
- if can?(current_user, :update_pages, @project) && @domains.any?
.card
.card-header
- Domains (#{@domains.count})
+ Domains (#{@domains.size})
%ul.list-group.list-group-flush.pages-domain-list{ class: ("has-verification-status" if verification_enabled) }
- @domains.each do |domain|
- - domain = Gitlab::View::Presenter::Factory.new(domain, current_user: current_user).fabricate!
%li.pages-domain-list-item.list-group-item.d-flex.justify-content-between
- if verification_enabled
- tooltip, status = domain.unverified? ? [s_('GitLabPages|Unverified'), 'failed'] : [s_('GitLabPages|Verified'), 'success']
@@ -35,6 +34,6 @@
%li.list-group-item.bs-callout-warning
- details_link_start = "<a href='#{project_pages_domain_path(@project, domain)}'>".html_safe
- details_link_end = '</a>'.html_safe
- = s_("GitLabPages|Something went wrong while obtaining Let's Encrypt certificate for %{domain}. To retry visit your %{link_start}domain details%{link_end}.").html_safe % { domain: domain.domain,
+ = s_("GitLabPages|Something went wrong while obtaining the Let's Encrypt certificate for %{domain}. To retry visit your %{link_start}domain details%{link_end}.").html_safe % { domain: domain.domain,
link_start: details_link_start,
link_end: details_link_end }
diff --git a/app/views/projects/pages_domains/_certificate.html.haml b/app/views/projects/pages_domains/_certificate.html.haml
index 92d30e0b056..e95841f2867 100644
--- a/app/views/projects/pages_domains/_certificate.html.haml
+++ b/app/views/projects/pages_domains/_certificate.html.haml
@@ -36,7 +36,7 @@
= _('Certificate')
.d-flex.justify-content-between.align-items-center.p-3
%span
- = @domain.subject || _('missing')
+ = @domain.pages_domain.subject || _('missing')
= link_to _('Remove'),
clean_certificate_project_pages_domain_path(@project, @domain),
data: { confirm: _('Are you sure?') },
diff --git a/app/views/projects/pages_domains/_lets_encrypt_callout.html.haml b/app/views/projects/pages_domains/_lets_encrypt_callout.html.haml
index d6406a78fca..f2de42b218c 100644
--- a/app/views/projects/pages_domains/_lets_encrypt_callout.html.haml
+++ b/app/views/projects/pages_domains/_lets_encrypt_callout.html.haml
@@ -1,10 +1,21 @@
- if @domain.enabled?
- - if @domain.auto_ssl_enabled && !@domain.certificate
- .form-group.border-section.js-shown-if-auto-ssl{ class: ("d-none" unless auto_ssl_available_and_enabled) }
- .row
- .col-sm-10.offset-sm-2
- .bs-callout.bs-callout-info.mt-0
- = _("GitLab is obtaining a Let's Encrypt SSL certificate for this domain. This process can take some time. Please try again later.")
+ - if @domain.auto_ssl_enabled
+ - if @domain.show_auto_ssl_failed_warning?
+ .form-group.border-section.js-shown-if-auto-ssl{ class: ("d-none" unless auto_ssl_available_and_enabled) }
+ .row
+ .col-sm-10.offset-sm-2
+ .bs-callout.bs-callout-warning.mt-0
+ .row.align-items-center.mx-2
+ = icon('warning', class: 'mr-2')
+ = _("Something went wrong while obtaining the Let's Encrypt certificate.")
+ .row.mx-0.mt-3
+ = link_to s_('GitLabPagesDomains|Retry'), retry_auto_ssl_project_pages_domain_path(@project, @domain), class: "btn btn-sm btn-grouped btn-warning", method: :post
+ - elsif !@domain.certificate_gitlab_provided?
+ .form-group.border-section.js-shown-if-auto-ssl{ class: ("d-none" unless auto_ssl_available_and_enabled) }
+ .row
+ .col-sm-10.offset-sm-2
+ .bs-callout.bs-callout-info.mt-0
+ = _("GitLab is obtaining a Let's Encrypt SSL certificate for this domain. This process can take some time. Please try again later.")
- else
.form-group.border-section.js-shown-if-auto-ssl{ class: ("d-none" unless auto_ssl_available_and_enabled) }
.row
diff --git a/app/views/projects/settings/ci_cd/show.html.haml b/app/views/projects/settings/ci_cd/show.html.haml
index ab2f64cdc21..c0f60b5f3b1 100644
--- a/app/views/projects/settings/ci_cd/show.html.haml
+++ b/app/views/projects/settings/ci_cd/show.html.haml
@@ -4,7 +4,7 @@
- expanded = expanded_by_default?
- general_expanded = @project.errors.empty? ? expanded : true
-- deploy_token_description = s_('DeployTokens|Deploy tokens allow read-only access to your repository and registry images.')
+- deploy_token_description = s_('DeployTokens|Deploy tokens allow access to your repository and registry images.')
%section.settings#js-general-pipeline-settings.no-animate{ class: ('expanded' if general_expanded) }
.settings-header
diff --git a/app/views/projects/static_site_editor/show.html.haml b/app/views/projects/static_site_editor/show.html.haml
index 9ccc54e6d51..8d2649be588 100644
--- a/app/views/projects/static_site_editor/show.html.haml
+++ b/app/views/projects/static_site_editor/show.html.haml
@@ -1 +1 @@
-#static-site-editor{ data: { project_id: '8', path: 'README.md' } }
+#static-site-editor{ data: @config.payload }
diff --git a/app/views/shared/deploy_tokens/_form.html.haml b/app/views/shared/deploy_tokens/_form.html.haml
index c4e82d8e157..5751ed9cb7a 100644
--- a/app/views/shared/deploy_tokens/_form.html.haml
+++ b/app/views/shared/deploy_tokens/_form.html.haml
@@ -30,5 +30,10 @@
= label_tag ("deploy_token_read_registry"), 'read_registry', class: 'label-bold form-check-label'
.text-secondary= s_('DeployTokens|Allows read-only access to the registry images')
+ %fieldset.form-group.form-check
+ = f.check_box :write_registry, class: 'form-check-input'
+ = label_tag ("deploy_token_write_registry"), 'write_registry', class: 'label-bold form-check-label'
+ .text-secondary= s_('DeployTokens|Allows write access to the registry images')
+
.prepend-top-default
= f.submit s_('DeployTokens|Create deploy token'), class: 'btn btn-success qa-create-deploy-token'
diff --git a/app/workers/concerns/project_import_options.rb b/app/workers/concerns/project_import_options.rb
index 2baf768bfd1..c8ee5539441 100644
--- a/app/workers/concerns/project_import_options.rb
+++ b/app/workers/concerns/project_import_options.rb
@@ -18,7 +18,12 @@ module ProjectImportOptions
"import"
end
- project.import_state.mark_as_failed(_("Every %{action} attempt has failed: %{job_error_message}. Please try again.") % { action: action, job_error_message: job['error_message'] })
+ if project.jira_import?
+ project.latest_jira_import.do_fail!
+ else
+ project.import_state.mark_as_failed(_("Every %{action} attempt has failed: %{job_error_message}. Please try again.") % { action: action, job_error_message: job['error_message'] })
+ end
+
Sidekiq.logger.warn "Failed #{job['class']} with #{job['args']}: #{job['error_message']}"
end
end
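The hunk above branches retry-exhaustion handling: Jira imports fail their own import record, while other imports keep marking the import state as failed. A minimal sketch of that flow, with the method name and local variables assumed for illustration only:

```ruby
# Hedged sketch of the branching added above; `handle_failed_import`, `project`, `job` and
# `action` are illustrative names, not part of the diff.
def handle_failed_import(project, job, action)
  if project.jira_import?
    # Jira imports track their own status, so the latest import record is failed directly.
    project.latest_jira_import.do_fail!
  else
    # Other imports keep the existing behaviour: record the error on the import state.
    message = _("Every %{action} attempt has failed: %{job_error_message}. Please try again.") %
      { action: action, job_error_message: job['error_message'] }
    project.import_state.mark_as_failed(message)
  end
end
```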
diff --git a/app/workers/environments/auto_stop_cron_worker.rb b/app/workers/environments/auto_stop_cron_worker.rb
index de5e10a0976..ada52d3402d 100644
--- a/app/workers/environments/auto_stop_cron_worker.rb
+++ b/app/workers/environments/auto_stop_cron_worker.rb
@@ -8,8 +8,6 @@ module Environments
feature_category :continuous_delivery
def perform
- return unless Feature.enabled?(:auto_stop_environments, default_enabled: true)
-
AutoStopService.new.execute
end
end
diff --git a/app/workers/project_update_repository_storage_worker.rb b/app/workers/project_update_repository_storage_worker.rb
index bb40107494b..ecee33e6421 100644
--- a/app/workers/project_update_repository_storage_worker.rb
+++ b/app/workers/project_update_repository_storage_worker.rb
@@ -3,21 +3,11 @@
class ProjectUpdateRepositoryStorageWorker # rubocop:disable Scalability/IdempotentWorker
include ApplicationWorker
- SameFilesystemError = Class.new(StandardError)
-
feature_category :gitaly
def perform(project_id, new_repository_storage_key)
project = Project.find(project_id)
- raise SameFilesystemError if same_filesystem?(project.repository.storage, new_repository_storage_key)
-
::Projects::UpdateRepositoryStorageService.new(project).execute(new_repository_storage_key)
end
-
- private
-
- def same_filesystem?(old_storage, new_storage)
- Gitlab::GitalyClient.filesystem_id(old_storage) == Gitlab::GitalyClient.filesystem_id(new_storage)
- end
end
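With the same-filesystem guard removed, the worker becomes a thin wrapper around the service. A hedged usage sketch; the storage key value is hypothetical:

```ruby
# Hedged sketch: enqueue the worker with a project id and the target storage key, matching
# its perform(project_id, new_repository_storage_key) signature. The same-filesystem check
# is now expected to happen inside Projects::UpdateRepositoryStorageService instead.
project = Project.find_by_full_path('group/project')                          # illustrative lookup
ProjectUpdateRepositoryStorageWorker.perform_async(project.id, 'storage-2')   # 'storage-2' is a hypothetical storage key
```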
diff --git a/changelogs/unreleased/199195-ide-fix-diff-highlighting.yml b/changelogs/unreleased/199195-ide-fix-diff-highlighting.yml
new file mode 100644
index 00000000000..439b4348241
--- /dev/null
+++ b/changelogs/unreleased/199195-ide-fix-diff-highlighting.yml
@@ -0,0 +1,5 @@
+---
+title: Fix Web IDE not showing diff when opening commit tab
+merge_request: 29439
+author:
+type: fixed
diff --git a/changelogs/unreleased/207528-tf-plan-in-mr.yml b/changelogs/unreleased/207528-tf-plan-in-mr.yml
new file mode 100644
index 00000000000..6535e17d48c
--- /dev/null
+++ b/changelogs/unreleased/207528-tf-plan-in-mr.yml
@@ -0,0 +1,5 @@
+---
+title: Add terraform report to merge request widget
+merge_request: 27700
+author:
+type: added
diff --git a/changelogs/unreleased/207549-add-refresh-dashboard-button-second-iteration.yml b/changelogs/unreleased/207549-add-refresh-dashboard-button-second-iteration.yml
new file mode 100644
index 00000000000..ee415ecb867
--- /dev/null
+++ b/changelogs/unreleased/207549-add-refresh-dashboard-button-second-iteration.yml
@@ -0,0 +1,5 @@
+---
+title: Refresh metrics dashboard data without reloading the page
+merge_request: 28756
+author:
+type: added
diff --git a/changelogs/unreleased/207912-integrate-filtered-search-component.yml b/changelogs/unreleased/207912-integrate-filtered-search-component.yml
new file mode 100644
index 00000000000..476acfd4964
--- /dev/null
+++ b/changelogs/unreleased/207912-integrate-filtered-search-component.yml
@@ -0,0 +1,5 @@
+---
+title: Add filtered search for elastic search in logs
+merge_request: 27654
+author:
+type: added
diff --git a/changelogs/unreleased/211460-annotations-post-endpoint-revised.yml b/changelogs/unreleased/211460-annotations-post-endpoint-revised.yml
new file mode 100644
index 00000000000..26abcf76ea6
--- /dev/null
+++ b/changelogs/unreleased/211460-annotations-post-endpoint-revised.yml
@@ -0,0 +1,5 @@
+---
+title: API endpoint to create annotations for environments dashboard
+merge_request: 29089
+author:
+type: added
diff --git a/changelogs/unreleased/211998-add-cluster-mangement-id-on-create.yml b/changelogs/unreleased/211998-add-cluster-mangement-id-on-create.yml
new file mode 100644
index 00000000000..cca81a37179
--- /dev/null
+++ b/changelogs/unreleased/211998-add-cluster-mangement-id-on-create.yml
@@ -0,0 +1,6 @@
+---
+title: Add management_project_id to group and project cluster creation, clarifies
+ docs.
+merge_request: 28289
+author:
+type: fixed
diff --git a/changelogs/unreleased/212560_initialize_sse_frontend.yml b/changelogs/unreleased/212560_initialize_sse_frontend.yml
new file mode 100644
index 00000000000..0ee5d7669ba
--- /dev/null
+++ b/changelogs/unreleased/212560_initialize_sse_frontend.yml
@@ -0,0 +1,5 @@
+---
+title: Provide configuration options for Static Site Editor
+merge_request: 29058
+author:
+type: added
diff --git a/changelogs/unreleased/212561-fix-empty-edit-area.yml b/changelogs/unreleased/212561-fix-empty-edit-area.yml
new file mode 100644
index 00000000000..0e45110265e
--- /dev/null
+++ b/changelogs/unreleased/212561-fix-empty-edit-area.yml
@@ -0,0 +1,5 @@
+---
+title: 'fix: Publish toolbar disappears when submitting empty content'
+merge_request: 29410
+author:
+type: fixed
diff --git a/changelogs/unreleased/212561-saving-changes-rest-service.yml b/changelogs/unreleased/212561-saving-changes-rest-service.yml
new file mode 100644
index 00000000000..e7d45f4cd92
--- /dev/null
+++ b/changelogs/unreleased/212561-saving-changes-rest-service.yml
@@ -0,0 +1,5 @@
+---
+title: Save changes in Static Site Editor using REST GitLab API
+merge_request: 29286
+author:
+type: added
diff --git a/changelogs/unreleased/213225-adjust-issues-label-on-jira-import.yml b/changelogs/unreleased/213225-adjust-issues-label-on-jira-import.yml
new file mode 100644
index 00000000000..045756cd025
--- /dev/null
+++ b/changelogs/unreleased/213225-adjust-issues-label-on-jira-import.yml
@@ -0,0 +1,5 @@
+---
+title: Adjust label title applied to issues on import from Jira
+merge_request: 29246
+author:
+type: changed
diff --git a/changelogs/unreleased/213299-env-autostop-bug.yml b/changelogs/unreleased/213299-env-autostop-bug.yml
new file mode 100644
index 00000000000..f4d3198f8bd
--- /dev/null
+++ b/changelogs/unreleased/213299-env-autostop-bug.yml
@@ -0,0 +1,5 @@
+---
+title: Add autostop check to folder table
+merge_request: 28937
+author:
+type: fixed
diff --git a/changelogs/unreleased/213325-elastic-recommendation-alert-appears-when-the-screen-is-loaded.yml b/changelogs/unreleased/213325-elastic-recommendation-alert-appears-when-the-screen-is-loaded.yml
new file mode 100644
index 00000000000..43583268c84
--- /dev/null
+++ b/changelogs/unreleased/213325-elastic-recommendation-alert-appears-when-the-screen-is-loaded.yml
@@ -0,0 +1,5 @@
+---
+title: Elasticsearch recommendation alert does not appear while the screen is loaded
+merge_request: 29097
+author:
+type: fixed
diff --git a/changelogs/unreleased/213382-use-not-valid-to-immediately-enforce-a-not-null-constraint.yml b/changelogs/unreleased/213382-use-not-valid-to-immediately-enforce-a-not-null-constraint.yml
new file mode 100644
index 00000000000..066c8e2c45c
--- /dev/null
+++ b/changelogs/unreleased/213382-use-not-valid-to-immediately-enforce-a-not-null-constraint.yml
@@ -0,0 +1,6 @@
+---
+title: Use NOT VALID to enforce a NOT NULL constraint on file_store for the ci_job_artifacts,
+ lfs_objects, and uploads tables
+merge_request: 28946
+author:
+type: fixed
diff --git a/changelogs/unreleased/213799-optimize-usage_activity_by_stage-projects_with_repositories_enable.yml b/changelogs/unreleased/213799-optimize-usage_activity_by_stage-projects_with_repositories_enable.yml
new file mode 100644
index 00000000000..af6a79a59b5
--- /dev/null
+++ b/changelogs/unreleased/213799-optimize-usage_activity_by_stage-projects_with_repositories_enable.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize projects with repositories enabled usage data
+merge_request: 29117
+author:
+type: performance
diff --git a/changelogs/unreleased/213800-optimize-usage_activity_by_stage-create-protected_branches.yml b/changelogs/unreleased/213800-optimize-usage_activity_by_stage-create-protected_branches.yml
new file mode 100644
index 00000000000..ab4233fda75
--- /dev/null
+++ b/changelogs/unreleased/213800-optimize-usage_activity_by_stage-create-protected_branches.yml
@@ -0,0 +1,5 @@
+---
+title: Optimize protected branches usage data
+merge_request: 29148
+author:
+type: performance
diff --git a/changelogs/unreleased/214218-feature-flag-enable-sort_discussions.yml b/changelogs/unreleased/214218-feature-flag-enable-sort_discussions.yml
new file mode 100644
index 00000000000..4e76379c0fe
--- /dev/null
+++ b/changelogs/unreleased/214218-feature-flag-enable-sort_discussions.yml
@@ -0,0 +1,5 @@
+---
+title: Allow sorting of issue and MR discussions
+merge_request: 29492
+author:
+type: added
diff --git a/changelogs/unreleased/22743-deploy-token-write-registry.yml b/changelogs/unreleased/22743-deploy-token-write-registry.yml
new file mode 100644
index 00000000000..842ef95446d
--- /dev/null
+++ b/changelogs/unreleased/22743-deploy-token-write-registry.yml
@@ -0,0 +1,5 @@
+---
+title: Add write_registry scope to deploy tokens for container registry push access
+merge_request: 28958
+author:
+type: added
diff --git a/changelogs/unreleased/34527-fix-graphql-endpoint-for-merge-requests.yml b/changelogs/unreleased/34527-fix-graphql-endpoint-for-merge-requests.yml
new file mode 100644
index 00000000000..b8cc411e120
--- /dev/null
+++ b/changelogs/unreleased/34527-fix-graphql-endpoint-for-merge-requests.yml
@@ -0,0 +1,5 @@
+---
+title: Fix pagination in Merge Request GraphQL api
+merge_request: 28667
+author: briankabiro
+type: fixed
diff --git a/changelogs/unreleased/37001.yml b/changelogs/unreleased/37001.yml
new file mode 100644
index 00000000000..7fdddb1edb4
--- /dev/null
+++ b/changelogs/unreleased/37001.yml
@@ -0,0 +1,5 @@
+---
+title: Add grab cursor for operations dashboard cards
+merge_request: 28868
+author:
+type: changed
diff --git a/changelogs/unreleased/55241-rate-limit-issue-creation.yml b/changelogs/unreleased/55241-rate-limit-issue-creation.yml
new file mode 100644
index 00000000000..76b3269ecb0
--- /dev/null
+++ b/changelogs/unreleased/55241-rate-limit-issue-creation.yml
@@ -0,0 +1,5 @@
+---
+title: Introduce rate limit for creating issues via web UI
+merge_request: 28129
+author:
+type: performance
diff --git a/changelogs/unreleased/bvl-remove-sidekiq-deduplication-feature-flag.yml b/changelogs/unreleased/bvl-remove-sidekiq-deduplication-feature-flag.yml
new file mode 100644
index 00000000000..cc8da28f6ca
--- /dev/null
+++ b/changelogs/unreleased/bvl-remove-sidekiq-deduplication-feature-flag.yml
@@ -0,0 +1,5 @@
+---
+title: Avoid scheduling duplicate sidekiq jobs
+merge_request: 29116
+author:
+type: performance
diff --git a/changelogs/unreleased/filter-pipeline-merge-requests-by-sha.yml b/changelogs/unreleased/filter-pipeline-merge-requests-by-sha.yml
new file mode 100644
index 00000000000..3dc1fbfac02
--- /dev/null
+++ b/changelogs/unreleased/filter-pipeline-merge-requests-by-sha.yml
@@ -0,0 +1,5 @@
+---
+title: Prevent false positives in Ci::Pipeline#all_merge_requests
+merge_request: 28800
+author:
+type: fixed
diff --git a/changelogs/unreleased/fix-keyboard-shortcut-nav-to-groups.yml b/changelogs/unreleased/fix-keyboard-shortcut-nav-to-groups.yml
new file mode 100644
index 00000000000..169d63d941e
--- /dev/null
+++ b/changelogs/unreleased/fix-keyboard-shortcut-nav-to-groups.yml
@@ -0,0 +1,5 @@
+---
+title: Fix keyboard shortcut to navigate to your groups
+merge_request: 28873
+author: Victor Wu
+type: other
diff --git a/changelogs/unreleased/github-rate-limit-on-project-import.yml b/changelogs/unreleased/github-rate-limit-on-project-import.yml
new file mode 100644
index 00000000000..24df1e68a89
--- /dev/null
+++ b/changelogs/unreleased/github-rate-limit-on-project-import.yml
@@ -0,0 +1,5 @@
+---
+title: Better error message when importing a GitHub project and the GitHub API rate limit is exceeded
+merge_request: 28785
+author:
+type: fixed
diff --git a/changelogs/unreleased/patch-97.yml b/changelogs/unreleased/patch-97.yml
new file mode 100644
index 00000000000..e4cd9b5243c
--- /dev/null
+++ b/changelogs/unreleased/patch-97.yml
@@ -0,0 +1,5 @@
+---
+title: Remove 'error' from diff note error message
+merge_request: 29281
+author:
+type: fixed
diff --git a/changelogs/unreleased/ph-210377-increaseMrPollTimes.yml b/changelogs/unreleased/ph-210377-increaseMrPollTimes.yml
new file mode 100644
index 00000000000..fb953f03e60
--- /dev/null
+++ b/changelogs/unreleased/ph-210377-increaseMrPollTimes.yml
@@ -0,0 +1,5 @@
+---
+title: Increase the timing of polling for the merge request widget
+merge_request:
+author:
+type: changed
diff --git a/changelogs/unreleased/sh-improve-dast-template-error.yml b/changelogs/unreleased/sh-improve-dast-template-error.yml
new file mode 100644
index 00000000000..d1943c1e99e
--- /dev/null
+++ b/changelogs/unreleased/sh-improve-dast-template-error.yml
@@ -0,0 +1,5 @@
+---
+title: Improve error message in DAST CI template
+merge_request: 29388
+author:
+type: other
diff --git a/changelogs/unreleased/slack-notification-retry-success-skip.yml b/changelogs/unreleased/slack-notification-retry-success-skip.yml
new file mode 100644
index 00000000000..1779ba8b138
--- /dev/null
+++ b/changelogs/unreleased/slack-notification-retry-success-skip.yml
@@ -0,0 +1,5 @@
+---
+title: Make pipeline info in chat notifications concise
+merge_request: 28284
+author:
+type: changed
diff --git a/changelogs/unreleased/update-ci-variable-qa-test.yml b/changelogs/unreleased/update-ci-variable-qa-test.yml
new file mode 100644
index 00000000000..ca34985917f
--- /dev/null
+++ b/changelogs/unreleased/update-ci-variable-qa-test.yml
@@ -0,0 +1,5 @@
+---
+title: Fix failing ci variable e2e test
+merge_request: 25924
+author:
+type: fixed
diff --git a/changelogs/unreleased/vs-migrate-deprecated-size-in-loading-icon.yml b/changelogs/unreleased/vs-migrate-deprecated-size-in-loading-icon.yml
new file mode 100644
index 00000000000..f78fa371c22
--- /dev/null
+++ b/changelogs/unreleased/vs-migrate-deprecated-size-in-loading-icon.yml
@@ -0,0 +1,5 @@
+---
+title: Replace deprecated GlLoadingIcon sizes
+merge_request: 29417
+author:
+type: fixed
diff --git a/config/pseudonymizer.yml b/config/pseudonymizer.yml
index 7b5f8aad255..195506ac4a1 100644
--- a/config/pseudonymizer.yml
+++ b/config/pseudonymizer.yml
@@ -239,7 +239,6 @@ tables:
- repository_size_limit
- require_two_factor_authentication
- two_factor_grace_period
- - plan_id
- project_creation_level
members:
whitelist:
diff --git a/config/routes/project.rb b/config/routes/project.rb
index 943ff24ffee..ab7c439318f 100644
--- a/config/routes/project.rb
+++ b/config/routes/project.rb
@@ -26,6 +26,10 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
scope '-' do
get 'archive/*id', constraints: { format: Gitlab::PathRegex.archive_formats_regex, id: /.+?/ }, to: 'repositories#archive', as: 'archive'
+ scope controller: :static_site_editor do
+ get '/sse/*id', action: :show, as: :show_sse
+ end
+
resources :artifacts, only: [:index, :destroy]
resources :jobs, only: [:index, :show], constraints: { id: /\d+/ } do
@@ -334,6 +338,7 @@ constraints(::Constraints::ProjectUrlConstrainer.new) do
resources :domains, except: :index, controller: 'pages_domains', constraints: { id: %r{[^/]+} } do
member do
post :verify
+ post :retry_auto_ssl
delete :clean_certificate
end
end
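The `retry_auto_ssl` member route added here backs the `retry_auto_ssl_project_pages_domain_path` helper used by the Pages domain callout earlier in this diff, and the static site editor route moves into the project `-` scope from `config/routes/repository.rb`. A minimal sketch of the generated helpers, with the `show_sse` helper name assumed from standard Rails route naming:

```ruby
# Hedged sketch of the route helpers these declarations generate.
# The retry link in _lets_encrypt_callout.html.haml POSTs to this member route:
retry_auto_ssl_project_pages_domain_path(@project, @domain)

# Assumed helper name for `get '/sse/*id', action: :show, as: :show_sse`
# under the project '-' scope; the id value is illustrative:
project_show_sse_path(@project, 'master/README.md')
```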
diff --git a/config/routes/repository.rb b/config/routes/repository.rb
index 8a635745907..38d6cdbbaf8 100644
--- a/config/routes/repository.rb
+++ b/config/routes/repository.rb
@@ -67,10 +67,6 @@ scope format: false do
end
end
- scope controller: :static_site_editor do
- get '/sse/*id', action: :show, as: :show_sse
- end
-
get '/tree/*id', to: 'tree#show', as: :tree
get '/raw/*id', to: 'raw#show', as: :raw
get '/blame/*id', to: 'blame#show', as: :blame
diff --git a/db/migrate/20200325111432_add_issues_create_limit_to_application_settings.rb b/db/migrate/20200325111432_add_issues_create_limit_to_application_settings.rb
new file mode 100644
index 00000000000..60da96ccf33
--- /dev/null
+++ b/db/migrate/20200325111432_add_issues_create_limit_to_application_settings.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+class AddIssuesCreateLimitToApplicationSettings < ActiveRecord::Migration[6.0]
+ DOWNTIME = false
+
+ def change
+ add_column :application_settings, :issues_create_limit, :integer, default: 300, null: false
+ end
+end
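The new column presumably backs the issue-creation rate limit mentioned in the `55241-rate-limit-issue-creation` changelog above. A hedged sketch of reading the setting; how the limiter consumes it is an assumption, not shown in this diff:

```ruby
# Hedged sketch: application_settings columns are exposed through Gitlab::CurrentSettings,
# so the new limit can be read like this. The default of 300 comes from the migration above.
limit = Gitlab::CurrentSettings.issues_create_limit
limit # => 300 unless an administrator changes it
```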
diff --git a/db/migrate/20200326122700_create_diff_note_positions.rb b/db/migrate/20200326122700_create_diff_note_positions.rb
new file mode 100644
index 00000000000..87159e666b5
--- /dev/null
+++ b/db/migrate/20200326122700_create_diff_note_positions.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+class CreateDiffNotePositions < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ create_table :diff_note_positions do |t|
+ t.references :note, foreign_key: { on_delete: :cascade }, null: false, index: false
+ t.integer :old_line
+ t.integer :new_line
+ t.integer :diff_content_type, limit: 2, null: false
+ t.integer :diff_type, limit: 2, null: false
+ t.string :line_code, limit: 255, null: false
+ t.binary :base_sha, null: false
+ t.binary :start_sha, null: false
+ t.binary :head_sha, null: false
+ t.text :old_path, null: false
+ t.text :new_path, null: false
+
+ t.index [:note_id, :diff_type], unique: true
+ end
+ end
+ end
+
+ def down
+ drop_table :diff_note_positions
+ end
+end
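The table pairs each note with at most one position per diff type, enforced by the unique `[:note_id, :diff_type]` index. A hedged model sketch over the new table; the class name and validations are assumptions, not part of this diff:

```ruby
# Hedged sketch of a model over diff_note_positions; names and validations are assumptions.
class DiffNotePosition < ApplicationRecord
  belongs_to :note

  # Mirrors the unique [:note_id, :diff_type] index created by the migration.
  validates :diff_type, uniqueness: { scope: :note_id }
end
```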
diff --git a/db/migrate/20200406102111_add_index_to_deployments_where_cluster_id_is_not_null.rb b/db/migrate/20200406102111_add_index_to_deployments_where_cluster_id_is_not_null.rb
new file mode 100644
index 00000000000..67ffba6af5e
--- /dev/null
+++ b/db/migrate/20200406102111_add_index_to_deployments_where_cluster_id_is_not_null.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexToDeploymentsWhereClusterIdIsNotNull < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :deployments, :id, where: 'cluster_id IS NOT NULL', name: 'index_deployments_on_id_where_cluster_id_present'
+ end
+
+ def down
+ remove_concurrent_index :deployments, :id, where: 'cluster_id IS NOT NULL', name: 'index_deployments_on_id_where_cluster_id_present'
+ end
+end
diff --git a/db/migrate/20200406165950_add_not_null_constraint_on_file_store_to_lfs_objects.rb b/db/migrate/20200406165950_add_not_null_constraint_on_file_store_to_lfs_objects.rb
new file mode 100644
index 00000000000..78b5832fea4
--- /dev/null
+++ b/db/migrate/20200406165950_add_not_null_constraint_on_file_store_to_lfs_objects.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+class AddNotNullConstraintOnFileStoreToLfsObjects < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ CONSTRAINT_NAME = 'lfs_objects_file_store_not_null'
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ execute <<~SQL
+ ALTER TABLE lfs_objects ADD CONSTRAINT #{CONSTRAINT_NAME} CHECK (file_store IS NOT NULL) NOT VALID;
+ SQL
+ end
+ end
+
+ def down
+ with_lock_retries do
+ execute <<~SQL
+ ALTER TABLE lfs_objects DROP CONSTRAINT IF EXISTS #{CONSTRAINT_NAME};
+ SQL
+ end
+ end
+end
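Adding the CHECK constraint with `NOT VALID` means PostgreSQL enforces it only for new and updated rows and skips scanning existing data, which keeps the lock short. A hedged sketch of the kind of follow-up migration that would validate the constraint later, once old rows are backfilled; this follow-up is not part of this diff:

```ruby
# Hedged sketch of a possible later step (not in this diff): VALIDATE CONSTRAINT scans the
# table and makes the NOT NULL check apply to pre-existing rows as well.
class ValidateLfsObjectsFileStoreNotNull < ActiveRecord::Migration[6.0]
  DOWNTIME = false

  def up
    execute <<~SQL
      ALTER TABLE lfs_objects VALIDATE CONSTRAINT lfs_objects_file_store_not_null;
    SQL
  end

  def down
    # No-op: validation has no state to revert short of dropping the constraint.
  end
end
```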
diff --git a/db/migrate/20200406171857_add_not_null_constraint_on_file_store_to_ci_job_artifacts.rb b/db/migrate/20200406171857_add_not_null_constraint_on_file_store_to_ci_job_artifacts.rb
new file mode 100644
index 00000000000..1d44e5c17b3
--- /dev/null
+++ b/db/migrate/20200406171857_add_not_null_constraint_on_file_store_to_ci_job_artifacts.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+class AddNotNullConstraintOnFileStoreToCiJobArtifacts < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ CONSTRAINT_NAME = 'ci_job_artifacts_file_store_not_null'
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ execute <<~SQL
+ ALTER TABLE ci_job_artifacts ADD CONSTRAINT #{CONSTRAINT_NAME} CHECK (file_store IS NOT NULL) NOT VALID;
+ SQL
+ end
+ end
+
+ def down
+ with_lock_retries do
+ execute <<~SQL
+ ALTER TABLE ci_job_artifacts DROP CONSTRAINT IF EXISTS #{CONSTRAINT_NAME};
+ SQL
+ end
+ end
+end
diff --git a/db/migrate/20200406172135_add_not_null_constraint_on_file_store_to_uploads.rb b/db/migrate/20200406172135_add_not_null_constraint_on_file_store_to_uploads.rb
new file mode 100644
index 00000000000..aa498ba9c89
--- /dev/null
+++ b/db/migrate/20200406172135_add_not_null_constraint_on_file_store_to_uploads.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+class AddNotNullConstraintOnFileStoreToUploads < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ CONSTRAINT_NAME = 'uploads_store_not_null'
+ DOWNTIME = false
+
+ def up
+ with_lock_retries do
+ execute <<~SQL
+ ALTER TABLE uploads ADD CONSTRAINT #{CONSTRAINT_NAME} CHECK (store IS NOT NULL) NOT VALID;
+ SQL
+ end
+ end
+
+ def down
+ with_lock_retries do
+ execute <<~SQL
+ ALTER TABLE uploads DROP CONSTRAINT IF EXISTS #{CONSTRAINT_NAME};
+ SQL
+ end
+ end
+end
diff --git a/db/migrate/20200406192059_add_write_registry_to_deploy_tokens.rb b/db/migrate/20200406192059_add_write_registry_to_deploy_tokens.rb
new file mode 100644
index 00000000000..22fdb030edc
--- /dev/null
+++ b/db/migrate/20200406192059_add_write_registry_to_deploy_tokens.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddWriteRegistryToDeployTokens < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_column_with_default(:deploy_tokens, :write_registry, :boolean, default: false, allow_null: false)
+ end
+
+ def down
+ remove_column(:deploy_tokens, :write_registry)
+ end
+end
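This boolean column is what the `write_registry` checkbox in `shared/deploy_tokens/_form.html.haml` and the updated deploy tokens API docs later in this diff refer to. A hedged sketch of how the flag surfaces on the model; the lookup is illustrative:

```ruby
# Hedged sketch: a boolean column automatically gets a predicate method via ActiveRecord.
token = DeployToken.find_by(name: 'My deploy token')  # illustrative lookup
token.write_registry?                                  # => false unless the scope was granted
```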
diff --git a/db/migrate/20200408153842_add_index_on_creator_id_and_id_on_projects.rb b/db/migrate/20200408153842_add_index_on_creator_id_and_id_on_projects.rb
new file mode 100644
index 00000000000..2cc91efcc36
--- /dev/null
+++ b/db/migrate/20200408153842_add_index_on_creator_id_and_id_on_projects.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexOnCreatorIdAndIdOnProjects < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :projects, [:creator_id, :id]
+ end
+
+ def down
+ remove_concurrent_index :projects, [:creator_id, :id]
+ end
+end
diff --git a/db/migrate/20200408175424_add_index_on_creator_id_created_at_id_to_projects_table.rb b/db/migrate/20200408175424_add_index_on_creator_id_created_at_id_to_projects_table.rb
new file mode 100644
index 00000000000..70df38aea17
--- /dev/null
+++ b/db/migrate/20200408175424_add_index_on_creator_id_created_at_id_to_projects_table.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class AddIndexOnCreatorIdCreatedAtIdToProjectsTable < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ disable_ddl_transaction!
+
+ def up
+ add_concurrent_index :projects, [:creator_id, :created_at, :id]
+ end
+
+ def down
+ remove_concurrent_index :projects, [:creator_id, :created_at, :id]
+ end
+end
diff --git a/db/post_migrate/20200406102120_backfill_deployment_clusters_from_deployments.rb b/db/post_migrate/20200406102120_backfill_deployment_clusters_from_deployments.rb
new file mode 100644
index 00000000000..2db270d303c
--- /dev/null
+++ b/db/post_migrate/20200406102120_backfill_deployment_clusters_from_deployments.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+# See http://doc.gitlab.com/ce/development/migration_style_guide.html
+# for more information on how to write migrations for GitLab.
+
+class BackfillDeploymentClustersFromDeployments < ActiveRecord::Migration[6.0]
+ include Gitlab::Database::MigrationHelpers
+
+ DOWNTIME = false
+
+ MIGRATION = 'BackfillDeploymentClustersFromDeployments'
+ DELAY_INTERVAL = 2.minutes
+ BATCH_SIZE = 10_000
+
+ disable_ddl_transaction!
+
+ class Deployment < ActiveRecord::Base
+ include EachBatch
+
+ default_scope { where('cluster_id IS NOT NULL') }
+
+ self.table_name = 'deployments'
+ end
+
+ def up
+ say "Scheduling `#{MIGRATION}` jobs"
+
+ queue_background_migration_jobs_by_range_at_intervals(Deployment, MIGRATION, DELAY_INTERVAL, batch_size: BATCH_SIZE)
+ end
+
+ def down
+ # NOOP
+ end
+end
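`queue_background_migration_jobs_by_range_at_intervals` walks the scoped `Deployment` relation in batches and enqueues one delayed background-migration job per id range. A hedged approximation of what that scheduling amounts to; the helper's exact internals may differ:

```ruby
# Hedged approximation of the scheduling performed by the helper above: one delayed job per
# batch of ids, each invoking the named migration class with a [start_id, end_id] range.
Deployment.each_batch(of: BATCH_SIZE) do |relation, index|
  range = [relation.minimum(:id), relation.maximum(:id)]
  BackgroundMigrationWorker.perform_in(index * DELAY_INTERVAL, MIGRATION, range)
end
```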
diff --git a/db/structure.sql b/db/structure.sql
index 90585a157cf..f924d69fd75 100644
--- a/db/structure.sql
+++ b/db/structure.sql
@@ -397,6 +397,7 @@ CREATE TABLE public.application_settings (
email_restrictions text,
npm_package_requests_forwarding boolean DEFAULT true NOT NULL,
namespace_storage_size_limit bigint DEFAULT 0 NOT NULL,
+ issues_create_limit integer DEFAULT 300 NOT NULL,
seat_link_enabled boolean DEFAULT true NOT NULL,
container_expiration_policies_enable_historic_entries boolean DEFAULT false NOT NULL
);
@@ -1997,7 +1998,8 @@ CREATE TABLE public.deploy_tokens (
token character varying,
username character varying,
token_encrypted character varying(255),
- deploy_token_type smallint DEFAULT 2 NOT NULL
+ deploy_token_type smallint DEFAULT 2 NOT NULL,
+ write_registry boolean DEFAULT false NOT NULL
);
CREATE SEQUENCE public.deploy_tokens_id_seq
@@ -2137,6 +2139,30 @@ CREATE SEQUENCE public.design_user_mentions_id_seq
ALTER SEQUENCE public.design_user_mentions_id_seq OWNED BY public.design_user_mentions.id;
+CREATE TABLE public.diff_note_positions (
+ id bigint NOT NULL,
+ note_id bigint NOT NULL,
+ old_line integer,
+ new_line integer,
+ diff_content_type smallint NOT NULL,
+ diff_type smallint NOT NULL,
+ line_code character varying(255) NOT NULL,
+ base_sha bytea NOT NULL,
+ start_sha bytea NOT NULL,
+ head_sha bytea NOT NULL,
+ old_path text NOT NULL,
+ new_path text NOT NULL
+);
+
+CREATE SEQUENCE public.diff_note_positions_id_seq
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+ALTER SEQUENCE public.diff_note_positions_id_seq OWNED BY public.diff_note_positions.id;
+
CREATE TABLE public.draft_notes (
id bigint NOT NULL,
merge_request_id integer NOT NULL,
@@ -7123,6 +7149,8 @@ ALTER TABLE ONLY public.design_management_versions ALTER COLUMN id SET DEFAULT n
ALTER TABLE ONLY public.design_user_mentions ALTER COLUMN id SET DEFAULT nextval('public.design_user_mentions_id_seq'::regclass);
+ALTER TABLE ONLY public.diff_note_positions ALTER COLUMN id SET DEFAULT nextval('public.diff_note_positions_id_seq'::regclass);
+
ALTER TABLE ONLY public.draft_notes ALTER COLUMN id SET DEFAULT nextval('public.draft_notes_id_seq'::regclass);
ALTER TABLE ONLY public.emails ALTER COLUMN id SET DEFAULT nextval('public.emails_id_seq'::regclass);
@@ -7669,6 +7697,9 @@ ALTER TABLE ONLY public.ci_daily_report_results
ALTER TABLE ONLY public.ci_group_variables
ADD CONSTRAINT ci_group_variables_pkey PRIMARY KEY (id);
+ALTER TABLE public.ci_job_artifacts
+ ADD CONSTRAINT ci_job_artifacts_file_store_not_null CHECK ((file_store IS NOT NULL)) NOT VALID;
+
ALTER TABLE ONLY public.ci_job_artifacts
ADD CONSTRAINT ci_job_artifacts_pkey PRIMARY KEY (id);
@@ -7828,6 +7859,9 @@ ALTER TABLE ONLY public.design_management_versions
ALTER TABLE ONLY public.design_user_mentions
ADD CONSTRAINT design_user_mentions_pkey PRIMARY KEY (id);
+ALTER TABLE ONLY public.diff_note_positions
+ ADD CONSTRAINT diff_note_positions_pkey PRIMARY KEY (id);
+
ALTER TABLE ONLY public.draft_notes
ADD CONSTRAINT draft_notes_pkey PRIMARY KEY (id);
@@ -8023,6 +8057,9 @@ ALTER TABLE ONLY public.ldap_group_links
ALTER TABLE ONLY public.lfs_file_locks
ADD CONSTRAINT lfs_file_locks_pkey PRIMARY KEY (id);
+ALTER TABLE public.lfs_objects
+ ADD CONSTRAINT lfs_objects_file_store_not_null CHECK ((file_store IS NOT NULL)) NOT VALID;
+
ALTER TABLE ONLY public.lfs_objects
ADD CONSTRAINT lfs_objects_pkey PRIMARY KEY (id);
@@ -8416,6 +8453,9 @@ ALTER TABLE ONLY public.u2f_registrations
ALTER TABLE ONLY public.uploads
ADD CONSTRAINT uploads_pkey PRIMARY KEY (id);
+ALTER TABLE public.uploads
+ ADD CONSTRAINT uploads_store_not_null CHECK ((store IS NOT NULL)) NOT VALID;
+
ALTER TABLE ONLY public.user_agent_details
ADD CONSTRAINT user_agent_details_pkey PRIMARY KEY (id);
@@ -9047,6 +9087,8 @@ CREATE INDEX index_deployments_on_environment_id_and_status ON public.deployment
CREATE INDEX index_deployments_on_id_and_status ON public.deployments USING btree (id, status);
+CREATE INDEX index_deployments_on_id_where_cluster_id_present ON public.deployments USING btree (id) WHERE (cluster_id IS NOT NULL);
+
CREATE INDEX index_deployments_on_project_id_and_id ON public.deployments USING btree (project_id, id DESC);
CREATE UNIQUE INDEX index_deployments_on_project_id_and_iid ON public.deployments USING btree (project_id, iid);
@@ -9085,6 +9127,8 @@ CREATE UNIQUE INDEX index_design_management_versions_on_sha_and_issue_id ON publ
CREATE UNIQUE INDEX index_design_user_mentions_on_note_id ON public.design_user_mentions USING btree (note_id);
+CREATE UNIQUE INDEX index_diff_note_positions_on_note_id_and_diff_type ON public.diff_note_positions USING btree (note_id, diff_type);
+
CREATE INDEX index_draft_notes_on_author_id ON public.draft_notes USING btree (author_id);
CREATE INDEX index_draft_notes_on_discussion_id ON public.draft_notes USING btree (discussion_id);
@@ -9883,6 +9927,10 @@ CREATE INDEX index_projects_on_created_at_and_id ON public.projects USING btree
CREATE INDEX index_projects_on_creator_id_and_created_at ON public.projects USING btree (creator_id, created_at);
+CREATE INDEX index_projects_on_creator_id_and_created_at_and_id ON public.projects USING btree (creator_id, created_at, id);
+
+CREATE INDEX index_projects_on_creator_id_and_id ON public.projects USING btree (creator_id, id);
+
CREATE INDEX index_projects_on_description_trigram ON public.projects USING gin (description public.gin_trgm_ops);
CREATE INDEX index_projects_on_id_and_archived_and_pending_delete ON public.projects USING btree (id) WHERE ((archived = false) AND (pending_delete = false));
@@ -11065,6 +11113,9 @@ ALTER TABLE ONLY public.project_statistics
ALTER TABLE ONLY public.user_details
ADD CONSTRAINT fk_rails_12e0b3043d FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE;
+ALTER TABLE ONLY public.diff_note_positions
+ ADD CONSTRAINT fk_rails_13c7212859 FOREIGN KEY (note_id) REFERENCES public.notes(id) ON DELETE CASCADE;
+
ALTER TABLE ONLY public.users_security_dashboard_projects
ADD CONSTRAINT fk_rails_150cd5682c FOREIGN KEY (project_id) REFERENCES public.projects(id) ON DELETE CASCADE;
@@ -13061,10 +13112,12 @@ COPY "schema_migrations" (version) FROM STDIN;
20200323134519
20200324093258
20200324115359
+20200325111432
20200325152327
20200325160952
20200325183636
20200326114443
+20200326122700
20200326124443
20200326134443
20200326135443
@@ -13086,9 +13139,17 @@ COPY "schema_migrations" (version) FROM STDIN;
20200403184110
20200403185127
20200403185422
+20200406102111
+20200406102120
20200406135648
+20200406165950
+20200406171857
+20200406172135
+20200406192059
20200407094005
20200407094923
20200408110856
+20200408153842
+20200408175424
\.
diff --git a/doc/administration/availability/index.md b/doc/administration/availability/index.md
index 90113985ad5..a0d4ea7919f 100644
--- a/doc/administration/availability/index.md
+++ b/doc/administration/availability/index.md
@@ -26,6 +26,14 @@ watch [this 1 hour Q&A](https://www.youtube.com/watch?v=uCU8jdYzpac)
with [John Northrup](https://gitlab.com/northrup), and live questions coming
in from some of our customers.
+GitLab offers a number of options to manage availability and resiliency. Below are the options to consider, along with their trade-offs.
+
+| Event | GitLab Feature | Recovery Point Objective (RPO) | Recovery Time Objective (RTO) | Cost |
+| ----- | -------------- | --- | --- | ---- |
+| Availability Zone failure | "GitLab HA" | No loss | No loss | 2x Git storage, multiple nodes balanced across AZs |
+| Region failure | "GitLab Disaster Recovery" | 5-10 minutes | 30 minutes | 2x primary cost |
+| All failures | Backup/Restore | Last backup | Hours to Days | Cost of storing the backups |
+
## High availability
### Omnibus installation with automatic database failover
diff --git a/doc/administration/geo/replication/version_specific_updates.md b/doc/administration/geo/replication/version_specific_updates.md
index a697d07ded4..81868d19c7f 100644
--- a/doc/administration/geo/replication/version_specific_updates.md
+++ b/doc/administration/geo/replication/version_specific_updates.md
@@ -4,6 +4,13 @@ Check this document if it includes instructions for the version you are updating
These steps go together with the [general steps](updating_the_geo_nodes.md#general-update-steps)
for updating Geo nodes.
+## Updating to GitLab 12.9
+
+CAUTION: **Warning:**
+GitLab 12.9.0 through GitLab 12.9.3 are affected by [a bug that stops
+repository verification](https://gitlab.com/gitlab-org/gitlab/-/issues/213523).
+The issue is fixed in GitLab 12.9.4. Please upgrade to GitLab 12.9.4 or later.
+
## Updating to GitLab 12.7
DANGER: **Danger:**
@@ -47,17 +54,17 @@ sudo touch /etc/gitlab/disable-postgresql-upgrade
## Updating to GitLab 12.0
-WARNING: **Warning:**
+CAUTION: **Warning:**
This version is affected by [a bug that results in new LFS objects not being replicated to
Geo secondary nodes](https://gitlab.com/gitlab-org/gitlab/issues/32696). The issue is fixed
-in GitLab 12.1. Please upgrade to GitLab 12.1 or newer.
+in GitLab 12.1. Please upgrade to GitLab 12.1 or later.
## Updating to GitLab 11.11
-WARNING: **Warning:**
+CAUTION: **Warning:**
This version is affected by [a bug that results in new LFS objects not being replicated to
Geo secondary nodes](https://gitlab.com/gitlab-org/gitlab/issues/32696). The issue is fixed
-in GitLab 12.1. Please upgrade to GitLab 12.1 or newer.
+in GitLab 12.1. Please upgrade to GitLab 12.1 or later.
## Updating to GitLab 10.8
diff --git a/doc/administration/gitaly/praefect.md b/doc/administration/gitaly/praefect.md
index 737deaa7f4e..d1d0c358dc6 100644
--- a/doc/administration/gitaly/praefect.md
+++ b/doc/administration/gitaly/praefect.md
@@ -194,8 +194,6 @@ application server, or a Gitaly node.
- `PRAEFECT_HOST` with the IP address or hostname of the Praefect node
```ruby
- # Make Praefect accept connections on all network interfaces.
- # Use firewalls to restrict access to this address/port.
praefect['listen_addr'] = 'PRAEFECT_HOST:2305'
# Enable Prometheus metrics access to Praefect. You must use firewalls
@@ -470,12 +468,16 @@ config.
Manual failover is possible by updating `praefect['virtual_storages']` and
nominating a new primary node.
- NOTE: **Note:**: Automatic failover is not yet supported for setups with
- multiple Praefect nodes. There is currently no coordination between Praefect
- nodes, which could result in two Praefect instances thinking two different
- Gitaly nodes are the primary. Follow issue
- [#2547](https://gitlab.com/gitlab-org/gitaly/-/issues/2547) for
- updates.
+1. By default, Praefect will nominate a primary Gitaly node for each
+ shard and store the state of the primary in local memory. This state
+ does not persist across restarts and will cause a split brain
+ if multiple Praefect nodes are used for redundancy.
+
+ To avoid this limitation, enable the SQL election strategy:
+
+ ```ruby
+ praefect['failover_election_strategy'] = 'sql'
+ ```
1. Save the changes to `/etc/gitlab/gitlab.rb` and [reconfigure
Praefect](../restart_gitlab.md#omnibus-gitlab-reconfigure):
@@ -532,7 +534,7 @@ Particular attention should be shown to:
`/etc/gitlab/gitlab.rb`
```ruby
- gitaly['listen_addr'] = 'tcp://GITLAB_HOST:8075'
+ gitaly['listen_addr'] = 'GITLAB_HOST:8075'
```
1. Configure the `gitlab_shell['secret_token']` so that callbacks from Gitaly
@@ -679,8 +681,18 @@ current primary node is found to be unhealthy.
checks fail for the current primary backend Gitaly node, and new primary will
be elected. **Do not use with multiple Praefect nodes!** Using with multiple
Praefect nodes is likely to result in a split brain.
-- **PostgreSQL:** Coming soon. See isse
- [#2547](https://gitlab.com/gitlab-org/gitaly/-/issues/2547) for updates.
+- **PostgreSQL:** Enabled by setting
+ `praefect['failover_election_strategy'] = 'sql'`. This configuration
+ option allows multiple Praefect nodes to coordinate via the
+ PostgreSQL database to elect a primary Gitaly node. Praefect nodes
+ monitor the health of the current primary and elect a new one if it
+ has not been reachable for 10 seconds by a majority of the Praefect
+ nodes.
+
+NOTE: **Note:** Praefect does not yet account for replication lag on
+the secondaries during the election process, so data loss can occur
+during a failover. Follow issue
+[#2642](https://gitlab.com/gitlab-org/gitaly/-/issues/2642) for updates.
It is likely that we will implement support for Consul, and a cloud native
strategy in the future.
diff --git a/doc/administration/high_availability/gitlab.md b/doc/administration/high_availability/gitlab.md
index cef9f9c5761..c9c425d366b 100644
--- a/doc/administration/high_availability/gitlab.md
+++ b/doc/administration/high_availability/gitlab.md
@@ -22,18 +22,10 @@ is recommended over [NFS](nfs.md) wherever possible for improved performance.
yum install nfs-utils nfs-utils-lib
```
-1. Specify the necessary NFS shares. Mounts are specified in
- `/etc/fstab`. The exact contents of `/etc/fstab` will depend on how you chose
- to configure your NFS server. See [NFS documentation](nfs.md) for the various
- options. Here is an example snippet to add to `/etc/fstab`:
-
- ```plaintext
- 10.1.0.1:/var/opt/gitlab/.ssh /var/opt/gitlab/.ssh nfs4 defaults,soft,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
- 10.1.0.1:/var/opt/gitlab/gitlab-rails/uploads /var/opt/gitlab/gitlab-rails/uploads nfs4 defaults,soft,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
- 10.1.0.1:/var/opt/gitlab/gitlab-rails/shared /var/opt/gitlab/gitlab-rails/shared nfs4 defaults,soft,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
- 10.1.0.1:/var/opt/gitlab/gitlab-ci/builds /var/opt/gitlab/gitlab-ci/builds nfs4 defaults,soft,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
- 10.1.0.1:/var/opt/gitlab/git-data /var/opt/gitlab/git-data nfs4 defaults,soft,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
- ```
+1. Specify the necessary NFS exports in `/etc/fstab`.
+ The exact contents of `/etc/fstab` will depend on how you chose
+ to configure your NFS server. See [NFS documentation](nfs.md#nfs-client-mount-options)
+ for examples and the various options.
1. Create the shared directories. These may be different depending on your NFS
mount locations.
diff --git a/doc/administration/high_availability/nfs.md b/doc/administration/high_availability/nfs.md
index 192434f7907..66f2986ab2a 100644
--- a/doc/administration/high_availability/nfs.md
+++ b/doc/administration/high_availability/nfs.md
@@ -148,12 +148,15 @@ For supported database architecture, please see our documentation on
## NFS Client mount options
-Below is an example of an NFS mount point defined in `/etc/fstab` we use on
-GitLab.com:
-
-```plaintext
-10.1.1.1:/var/opt/gitlab/git-data /var/opt/gitlab/git-data nfs4 defaults,soft,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
-```
+Here is an example snippet to add to `/etc/fstab`:
+
+ ```plaintext
+ 10.1.0.1:/var/opt/gitlab/.ssh /var/opt/gitlab/.ssh nfs4 defaults,vers=4.1,hard,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
+ 10.1.0.1:/var/opt/gitlab/gitlab-rails/uploads /var/opt/gitlab/gitlab-rails/uploads nfs4 defaults,vers=4.1,hard,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
+ 10.1.0.1:/var/opt/gitlab/gitlab-rails/shared /var/opt/gitlab/gitlab-rails/shared nfs4 defaults,vers=4.1,hard,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
+ 10.1.0.1:/var/opt/gitlab/gitlab-ci/builds /var/opt/gitlab/gitlab-ci/builds nfs4 defaults,vers=4.1,hard,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
+ 10.1.0.1:/var/opt/gitlab/git-data /var/opt/gitlab/git-data nfs4 defaults,vers=4.1,hard,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
+ ```
Note there are several options that you should consider using:
@@ -162,6 +165,42 @@ Note there are several options that you should consider using:
| `vers=4.1` |NFS v4.1 should be used instead of v4.0 because there is a Linux [NFS client bug in v4.0](https://gitlab.com/gitlab-org/gitaly/issues/1339) that can cause significant problems due to stale data.
| `nofail` | Don't halt boot process waiting for this mount to become available
| `lookupcache=positive` | Tells the NFS client to honor `positive` cache results but invalidates any `negative` cache results. Negative cache results cause problems with Git. Specifically, a `git push` can fail to register uniformly across all NFS clients. The negative cache causes the clients to 'remember' that the files did not exist previously.
+| `hard` | Instead of `soft`. [Further details](#soft-mount-option).
+
+### soft mount option
+
+We recommend that you use `hard` in your mount options, unless you have a specific
+reason to use `soft`.
+
+On GitLab.com, we use `soft` because there were times when we had NFS servers
+reboot and `soft` improved availability, but everyone's infrastructure is different.
+If your NFS is provided by on-premise storage arrays with redundant controllers,
+for example, you shouldn't need to worry about NFS server availability.
+
+The NFS man page states:
+
+> "soft" timeout can cause silent data corruption in certain cases
+
+Read the [Linux man page](https://linux.die.net/man/5/nfs) to understand the difference,
+and if you do use `soft`, ensure that you've taken steps to mitigate the risks.
+
+If you experience behaviour that might have been caused by
+writes to disk on the NFS server not occurring, such as commits going missing,
+use the `hard` option, because (from the man page):
+
+> use the soft option only when client responsiveness is more important than data integrity
+
+Other vendors make similar recommendations, including
+[SAP](http://wiki.scn.sap.com/wiki/x/PARnFQ) and NetApp's
+[knowledge base](https://kb.netapp.com/app/answers/answer_view/a_id/1004893/~/hard-mount-vs-soft-mount-),
+which highlight that if the NFS client driver caches data, `soft` means there is no certainty whether
+writes by GitLab are actually on disk.
+
+Mount points set with the option `hard` may not perform as well, and if the
+NFS server goes down, `hard` will cause processes to hang when interacting with
+the mount point. Use `SIGKILL` (`kill -9`) to deal with hung processes.
+The `intr` option
+[stopped working in the 2.6 kernel](https://access.redhat.com/solutions/157873).
## A single NFS mount
diff --git a/doc/administration/high_availability/nfs_host_client_setup.md b/doc/administration/high_availability/nfs_host_client_setup.md
index ddc58fc0db7..6823c1d9abe 100644
--- a/doc/administration/high_availability/nfs_host_client_setup.md
+++ b/doc/administration/high_availability/nfs_host_client_setup.md
@@ -94,10 +94,11 @@ Edit `/etc/fstab` on client as below to mount the remote shares automatically at
Note that GitLab requires advisory file locking, which is only supported natively in
NFS version 4. NFSv3 also supports locking as long as Linux Kernel 2.6.5+ is used.
We recommend using version 4 and do not specifically test NFSv3.
+See [NFS documentation](nfs.md#nfs-client-mount-options) for guidance on mount options.
```text
#/etc/fstab
-165.227.159.85:/home /nfs/home nfs4 defaults,soft,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
+10.0.0.1:/nfs/home /nfs/home nfs4 defaults,hard,vers=4.1,rsize=1048576,wsize=1048576,noatime,nofail,lookupcache=positive 0 2
```
Reboot the client and confirm that the mount point is mounted automatically.
diff --git a/doc/administration/scaling/index.md b/doc/administration/scaling/index.md
index c91fe395b3d..ec7492883cc 100644
--- a/doc/administration/scaling/index.md
+++ b/doc/administration/scaling/index.md
@@ -40,9 +40,13 @@ needs.
| Object storage service | Recommended store for shared data objects | [Cloud Object Storage configuration](../high_availability/object_storage.md) |
| NFS | Shared disk storage service. Can be used as an alternative for Gitaly or Object Storage. Required for GitLab Pages | [NFS configuration](../high_availability/nfs.md) |
-## Examples
+## Reference architectures
+
+- 1 - 1000 Users: A single-node [Omnibus](https://docs.gitlab.com/omnibus/) setup with frequent backups. Refer to the [Single-node Omnibus installation](#single-node-installation) section below.
+- 1000 to 50000+ Users: A [Scaled-out Omnibus installation with multiple servers](#multi-node-installation-scaled-out-for-availability), which can be deployed with or without high-availability components.
+ - To decide on the level of availability, refer to our [Availability](../availability/index.md) page.
-### Single-node Omnibus installation
+### Single-node installation
This solution is appropriate for many teams that have a single server at their disposal. With automatic backup of the GitLab repositories, configuration, and the database, this can be an optimal solution if you don't have strict availability requirements.
@@ -55,7 +59,7 @@ References:
- [Installation Docs](../../install/README.md)
- [Backup/Restore Docs](https://docs.gitlab.com/omnibus/settings/backups.html#backup-and-restore-omnibus-gitlab-configuration)
-### Omnibus installation with multiple application servers
+### Multi-node installation (scaled out for availability)
This solution is appropriate for teams that are starting to scale out when
scaling up is no longer meeting their needs. In this configuration, additional application nodes will handle frontend traffic, with a load balancer in front to distribute traffic across those nodes. Meanwhile, each application node connects to a shared file server and PostgreSQL and Redis services on the back end.
@@ -72,14 +76,6 @@ References:
- [Configure packaged PostgreSQL server to listen on TCP/IP](https://docs.gitlab.com/omnibus/settings/database.html#configure-packaged-postgresql-server-to-listen-on-tcpip)
- [Setting up a Redis-only server](https://docs.gitlab.com/omnibus/settings/redis.html#setting-up-a-redis-only-server)
-## Recommended setups based on number of users
-
-- 1 - 1000 Users: A single-node [Omnibus](https://docs.gitlab.com/omnibus/) setup with frequent backups. Refer to the [requirements page](../../install/requirements.md) for further details of the specs you will require.
-- 1000 - 10000 Users: A scaled environment based on one of our [Reference Architectures](#reference-architectures), without the HA components applied. This can be a reasonable step towards a fully HA environment.
-- 2000 - 50000+ Users: A scaled HA environment based on one of our [Reference Architectures](#reference-architectures) below.
-
-## Reference architectures
-
In this section we'll detail the Reference Architectures that can support large numbers
of users. These were built, tested and verified by our Quality and Support teams.
@@ -99,7 +95,7 @@ how much automation you use, mirroring, and repo/change size. Additionally the
shown memory values are given directly by [GCP machine types](https://cloud.google.com/compute/docs/machine-types).
On different cloud vendors a best effort like for like can be used.
-### 2,000 user configuration
+#### 2,000 user configuration
- **Supported users (approximate):** 2,000
- **Test RPS rates:** API: 40 RPS, Web: 4 RPS, Git: 4 RPS
@@ -120,7 +116,7 @@ On different cloud vendors a best effort like for like can be used.
| External load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 | c5.large |
| Internal load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 | c5.large |
-### 5,000 user configuration
+#### 5,000 user configuration
- **Supported users (approximate):** 5,000
- **Test RPS rates:** API: 100 RPS, Web: 10 RPS, Git: 10 RPS
@@ -141,7 +137,7 @@ On different cloud vendors a best effort like for like can be used.
| External load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 | c5.large |
| Internal load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 | c5.large |
-### 10,000 user configuration
+#### 10,000 user configuration
- **Supported users (approximate):** 10,000
- **Test RPS rates:** API: 200 RPS, Web: 20 RPS, Git: 20 RPS
@@ -165,7 +161,7 @@ On different cloud vendors a best effort like for like can be used.
| External load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 | c5.large |
| Internal load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 | c5.large |
-### 25,000 user configuration
+#### 25,000 user configuration
- **Supported users (approximate):** 25,000
- **Test RPS rates:** API: 500 RPS, Web: 50 RPS, Git: 50 RPS
@@ -189,7 +185,7 @@ On different cloud vendors a best effort like for like can be used.
| External load balancing node[^6] | 1 | 2 vCPU, 1.8GB Memory | n1-highcpu-2 | c5.large |
| Internal load balancing node[^6] | 1 | 4 vCPU, 3.6GB Memory | n1-highcpu-4 | c5.xlarge |
-### 50,000 user configuration
+#### 50,000 user configuration
- **Supported users (approximate):** 50,000
- **Test RPS rates:** API: 1000 RPS, Web: 100 RPS, Git: 100 RPS
diff --git a/doc/api/deploy_tokens.md b/doc/api/deploy_tokens.md
index 4663159f1eb..461957847df 100644
--- a/doc/api/deploy_tokens.md
+++ b/doc/api/deploy_tokens.md
@@ -92,7 +92,7 @@ POST /projects/:id/deploy_tokens
| `name` | string | yes | New deploy token's name |
| `expires_at` | datetime | no | Expiration date for the deploy token. Does not expire if no value is provided. |
| `username` | string | no | Username for deploy token. Default is `gitlab+deploy-token-{n}` |
-| `scopes` | array of strings | yes | Indicates the deploy token scopes. Must be at least one of `read_repository` or `read_registry`. |
+| `scopes` | array of strings | yes | Indicates the deploy token scopes. Must be at least one of `read_repository`, `read_registry`, or `write_registry`. |
```shell
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --header "Content-Type: application/json" --data '{"name": "My deploy token", "expires_at": "2021-01-01", "username": "custom-user", "scopes": ["read_repository"]}' "https://gitlab.example.com/api/v4/projects/5/deploy_tokens/"
@@ -193,7 +193,7 @@ POST /groups/:id/deploy_tokens
| `name` | string | yes | New deploy token's name |
| `expires_at` | datetime | no | Expiration date for the deploy token. Does not expire if no value is provided. |
| `username` | string | no | Username for deploy token. Default is `gitlab+deploy-token-{n}` |
-| `scopes` | array of strings | yes | Indicates the deploy token scopes. Must be at least one of `read_repository` or `read_registry`. |
+| `scopes` | array of strings | yes | Indicates the deploy token scopes. Must be at least one of `read_repository`, `read_registry`, or `write_registry`. |
Example request:
diff --git a/doc/api/graphql/reference/gitlab_schema.graphql b/doc/api/graphql/reference/gitlab_schema.graphql
index 4ce54a1b3fb..eb9b285803d 100644
--- a/doc/api/graphql/reference/gitlab_schema.graphql
+++ b/doc/api/graphql/reference/gitlab_schema.graphql
@@ -1832,6 +1832,46 @@ type DiscussionEdge {
node: Discussion
}
+"""
+Autogenerated input type of DismissVulnerability
+"""
+input DismissVulnerabilityInput {
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+
+ """
+ Reason why vulnerability should be dismissed
+ """
+ comment: String
+
+ """
+ ID of the vulnerability to be dismissed
+ """
+ id: ID!
+}
+
+"""
+Autogenerated return type of DismissVulnerability
+"""
+type DismissVulnerabilityPayload {
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+
+ """
+ Reasons why the mutation failed.
+ """
+ errors: [String!]!
+
+ """
+ The vulnerability after dismissal
+ """
+ vulnerability: Vulnerability
+}
+
interface Entry {
"""
Flat path of the entry
@@ -5413,6 +5453,7 @@ type Mutation {
designManagementUpload(input: DesignManagementUploadInput!): DesignManagementUploadPayload
destroyNote(input: DestroyNoteInput!): DestroyNotePayload
destroySnippet(input: DestroySnippetInput!): DestroySnippetPayload
+ dismissVulnerability(input: DismissVulnerabilityInput!): DismissVulnerabilityPayload
epicAddIssue(input: EpicAddIssueInput!): EpicAddIssuePayload
epicSetSubscription(input: EpicSetSubscriptionInput!): EpicSetSubscriptionPayload
epicTreeReorder(input: EpicTreeReorderInput!): EpicTreeReorderPayload
@@ -9535,6 +9576,11 @@ type Vulnerability {
title: String
"""
+ Permissions for the current user on the resource
+ """
+ userPermissions: VulnerabilityPermissions!
+
+ """
URL to the vulnerability's details page
"""
vulnerabilityPath: String
@@ -9576,6 +9622,51 @@ type VulnerabilityEdge {
}
"""
+Check permissions for the current user on a vulnerability
+"""
+type VulnerabilityPermissions {
+ """
+ Indicates the user can perform `admin_vulnerability` on this resource
+ """
+ adminVulnerability: Boolean!
+
+ """
+ Indicates the user can perform `admin_vulnerability_issue_link` on this resource
+ """
+ adminVulnerabilityIssueLink: Boolean!
+
+ """
+ Indicates the user can perform `create_vulnerability` on this resource
+ """
+ createVulnerability: Boolean!
+
+ """
+ Indicates the user can perform `create_vulnerability_export` on this resource
+ """
+ createVulnerabilityExport: Boolean!
+
+ """
+ Indicates the user can perform `create_vulnerability_feedback` on this resource
+ """
+ createVulnerabilityFeedback: Boolean!
+
+ """
+ Indicates the user can perform `destroy_vulnerability_feedback` on this resource
+ """
+ destroyVulnerabilityFeedback: Boolean!
+
+ """
+ Indicates the user can perform `read_vulnerability_feedback` on this resource
+ """
+ readVulnerabilityFeedback: Boolean!
+
+ """
+ Indicates the user can perform `update_vulnerability_feedback` on this resource
+ """
+ updateVulnerabilityFeedback: Boolean!
+}
+
+"""
The type of the security scan that found the vulnerability.
"""
enum VulnerabilityReportType {
diff --git a/doc/api/graphql/reference/gitlab_schema.json b/doc/api/graphql/reference/gitlab_schema.json
index bf8206e61cc..f6c3510d6dc 100644
--- a/doc/api/graphql/reference/gitlab_schema.json
+++ b/doc/api/graphql/reference/gitlab_schema.json
@@ -5394,6 +5394,118 @@
"possibleTypes": null
},
{
+ "kind": "INPUT_OBJECT",
+ "name": "DismissVulnerabilityInput",
+ "description": "Autogenerated input type of DismissVulnerability",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "id",
+ "description": "ID of the vulnerability to be dismissed",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "ID",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "comment",
+ "description": "Reason why vulnerability should be dismissed",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "DismissVulnerabilityPayload",
+ "description": "Autogenerated return type of DismissVulnerability",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "errors",
+ "description": "Reasons why the mutation failed.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "LIST",
+ "name": null,
+ "ofType": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ }
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "vulnerability",
+ "description": "The vulnerability after dismissal",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "Vulnerability",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
"kind": "INTERFACE",
"name": "Entry",
"description": null,
@@ -15822,6 +15934,33 @@
"deprecationReason": null
},
{
+ "name": "dismissVulnerability",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "DismissVulnerabilityInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "DismissVulnerabilityPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "epicAddIssue",
"description": null,
"args": [
@@ -28763,6 +28902,24 @@
"deprecationReason": null
},
{
+ "name": "userPermissions",
+ "description": "Permissions for the current user on the resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "OBJECT",
+ "name": "VulnerabilityPermissions",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
"name": "vulnerabilityPath",
"description": "URL to the vulnerability's details page",
"args": [
@@ -28897,6 +29054,163 @@
"possibleTypes": null
},
{
+ "kind": "OBJECT",
+ "name": "VulnerabilityPermissions",
+ "description": "Check permissions for the current user on a vulnerability",
+ "fields": [
+ {
+ "name": "adminVulnerability",
+ "description": "Indicates the user can perform `admin_vulnerability` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "adminVulnerabilityIssueLink",
+ "description": "Indicates the user can perform `admin_vulnerability_issue_link` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "createVulnerability",
+ "description": "Indicates the user can perform `create_vulnerability` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "createVulnerabilityExport",
+ "description": "Indicates the user can perform `create_vulnerability_export` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "createVulnerabilityFeedback",
+ "description": "Indicates the user can perform `create_vulnerability_feedback` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "destroyVulnerabilityFeedback",
+ "description": "Indicates the user can perform `destroy_vulnerability_feedback` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "readVulnerabilityFeedback",
+ "description": "Indicates the user can perform `read_vulnerability_feedback` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "updateVulnerabilityFeedback",
+ "description": "Indicates the user can perform `update_vulnerability_feedback` on this resource",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ }
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
"kind": "ENUM",
"name": "VulnerabilityReportType",
"description": "The type of the security scan that found the vulnerability.",
diff --git a/doc/api/graphql/reference/index.md b/doc/api/graphql/reference/index.md
index e1375530bf4..082d7decbf9 100644
--- a/doc/api/graphql/reference/index.md
+++ b/doc/api/graphql/reference/index.md
@@ -317,6 +317,16 @@ Autogenerated return type of DestroySnippet
| `id` | ID! | ID of this discussion |
| `replyId` | ID! | ID used to reply to this discussion |
+## DismissVulnerabilityPayload
+
+Autogenerated return type of DismissVulnerability
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `clientMutationId` | String | A unique identifier for the client performing the mutation. |
+| `errors` | String! => Array | Reasons why the mutation failed. |
+| `vulnerability` | Vulnerability | The vulnerability after dismissal |
+
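A minimal sketch of calling the new `dismissVulnerability` mutation through the GraphQL endpoint with `curl`; the host, token, and vulnerability global ID are placeholders:

```shell
# Placeholder host, token, and vulnerability global ID.
curl --request POST "https://gitlab.example.com/api/graphql" \
  --header "Authorization: Bearer <your_access_token>" \
  --header "Content-Type: application/json" \
  --data '{"query": "mutation { dismissVulnerability(input: { id: \"gid://gitlab/Vulnerability/1\", comment: \"False positive\" }) { errors vulnerability { state } } }"}'
```

On success, `errors` is empty and the returned vulnerability's `state` should be `DISMISSED`.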
## Environment
Describes where code is deployed for a project
@@ -1495,8 +1505,24 @@ Represents a vulnerability.
| `severity` | VulnerabilitySeverity | Severity of the vulnerability (INFO, UNKNOWN, LOW, MEDIUM, HIGH, CRITICAL) |
| `state` | VulnerabilityState | State of the vulnerability (DETECTED, DISMISSED, RESOLVED, CONFIRMED) |
| `title` | String | Title of the vulnerability |
+| `userPermissions` | VulnerabilityPermissions! | Permissions for the current user on the resource |
| `vulnerabilityPath` | String | URL to the vulnerability's details page |
+## VulnerabilityPermissions
+
+Check permissions for the current user on a vulnerability
+
+| Name | Type | Description |
+| --- | ---- | ---------- |
+| `adminVulnerability` | Boolean! | Indicates the user can perform `admin_vulnerability` on this resource |
+| `adminVulnerabilityIssueLink` | Boolean! | Indicates the user can perform `admin_vulnerability_issue_link` on this resource |
+| `createVulnerability` | Boolean! | Indicates the user can perform `create_vulnerability` on this resource |
+| `createVulnerabilityExport` | Boolean! | Indicates the user can perform `create_vulnerability_export` on this resource |
+| `createVulnerabilityFeedback` | Boolean! | Indicates the user can perform `create_vulnerability_feedback` on this resource |
+| `destroyVulnerabilityFeedback` | Boolean! | Indicates the user can perform `destroy_vulnerability_feedback` on this resource |
+| `readVulnerabilityFeedback` | Boolean! | Indicates the user can perform `read_vulnerability_feedback` on this resource |
+| `updateVulnerabilityFeedback` | Boolean! | Indicates the user can perform `update_vulnerability_feedback` on this resource |
+
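A hedged sketch of reading these permissions with `curl`; it assumes a `vulnerabilities` connection is reachable from the project in your schema version, and all values are placeholders:

```shell
# Assumes project.vulnerabilities exists in this schema version; placeholder host, token, and path.
curl --request POST "https://gitlab.example.com/api/graphql" \
  --header "Authorization: Bearer <your_access_token>" \
  --header "Content-Type: application/json" \
  --data '{"query": "{ project(fullPath: \"my-group/my-project\") { vulnerabilities { nodes { title userPermissions { adminVulnerability createVulnerabilityFeedback } } } } }"}'
```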
## VulnerabilitySeveritiesCount
Represents vulnerability counts by severity
diff --git a/doc/api/group_clusters.md b/doc/api/group_clusters.md
index e9b4b2b92ab..01c6d59f60d 100644
--- a/doc/api/group_clusters.md
+++ b/doc/api/group_clusters.md
@@ -224,6 +224,7 @@ Parameters:
| `cluster_id` | integer | yes | The ID of the cluster |
| `name` | string | no | The name of the cluster |
| `domain` | string | no | The [base domain](../user/group/clusters/index.md#base-domain) of the cluster |
+| `management_project_id` | integer | no | The ID of the [management project](../user/clusters/management_project.md) for the cluster |
| `platform_kubernetes_attributes[api_url]` | string | no | The URL to access the Kubernetes API |
| `platform_kubernetes_attributes[token]` | string | no | The token to authenticate against Kubernetes |
| `platform_kubernetes_attributes[ca_cert]` | string | no | TLS certificate. Required if API is using a self-signed TLS certificate. |
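A hedged example of supplying the new parameter when editing a group cluster; the group ID, cluster ID, and management project ID below are placeholders:

```shell
# Placeholder IDs and token; sets project 42 as the management project for cluster 18 of group 26.
curl --request PUT --header "PRIVATE-TOKEN: <your_access_token>" \
  "https://gitlab.example.com/api/v4/groups/26/clusters/18?management_project_id=42"
```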
diff --git a/doc/api/merge_requests.md b/doc/api/merge_requests.md
index 454d4a2b35b..adb5b00085e 100644
--- a/doc/api/merge_requests.md
+++ b/doc/api/merge_requests.md
@@ -904,7 +904,7 @@ The new pipeline can be:
- A detached merge request pipeline.
- A [pipeline for merged results](../ci/merge_request_pipelines/pipelines_for_merged_results/index.md)
- if the [project setting is enabled](../ci/merge_request_pipelines/pipelines_for_merged_results/index.md#enabling-pipelines-for-merged-results).
+ if the [project setting is enabled](../ci/merge_request_pipelines/pipelines_for_merged_results/index.md#enable-pipelines-for-merged-results).
```plaintext
POST /projects/:id/merge_requests/:merge_request_iid/pipelines
diff --git a/doc/api/pipelines.md b/doc/api/pipelines.md
index 490f1304130..5b67df14ace 100644
--- a/doc/api/pipelines.md
+++ b/doc/api/pipelines.md
@@ -12,7 +12,7 @@ GET /projects/:id/pipelines
|-----------|---------|----------|---------------------|
| `id` | integer/string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
| `scope` | string | no | The scope of pipelines, one of: `running`, `pending`, `finished`, `branches`, `tags` |
-| `status` | string | no | The status of pipelines, one of: `running`, `pending`, `success`, `failed`, `canceled`, `skipped`, `created` |
+| `status` | string | no | The status of pipelines, one of: `running`, `pending`, `success`, `failed`, `canceled`, `skipped`, `created`, `manual` |
| `ref` | string | no | The ref of pipelines |
| `sha` | string | no | The SHA of pipelines |
| `yaml_errors`| boolean | no | Returns pipelines with invalid configurations |
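A hedged example of filtering on the newly documented status; the project ID and token are placeholders:

```shell
# Placeholder project ID and token; lists pipelines that are waiting on a manual action.
curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects/5/pipelines?status=manual"
```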
diff --git a/doc/api/project_clusters.md b/doc/api/project_clusters.md
index 2ed57eceb85..79800af2f59 100644
--- a/doc/api/project_clusters.md
+++ b/doc/api/project_clusters.md
@@ -179,6 +179,7 @@ Parameters:
| `id` | integer | yes | The ID of the project owned by the authenticated user |
| `name` | string | yes | The name of the cluster |
| `domain` | string | no | The [base domain](../user/project/clusters/index.md#base-domain) of the cluster |
+| `management_project_id` | integer | no | The ID of the [management project](../user/clusters/management_project.md) for the cluster |
| `enabled` | boolean | no | Determines if cluster is active or not, defaults to true |
| `managed` | boolean | no | Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true |
| `platform_kubernetes_attributes[api_url]` | string | yes | The URL to access the Kubernetes API |
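A hedged example of passing the new parameter when adding an existing cluster to a project (assuming the `POST /projects/:id/clusters/user` endpoint; all values are placeholders):

```shell
# Placeholder values; registers an existing cluster and sets its management project.
curl --request POST --header "PRIVATE-TOKEN: <your_access_token>" --header "Content-Type: application/json" \
  --data '{"name": "cluster-1", "management_project_id": 42, "platform_kubernetes_attributes": {"api_url": "https://35.111.51.20", "token": "<kubernetes-token>"}}' \
  "https://gitlab.example.com/api/v4/projects/5/clusters/user"
```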
diff --git a/doc/api/vulnerability_issue_links.md b/doc/api/vulnerability_issue_links.md
deleted file mode 100644
index 05213e788c4..00000000000
--- a/doc/api/vulnerability_issue_links.md
+++ /dev/null
@@ -1,217 +0,0 @@
-# Vulnerability Issue links API **(ULTIMATE)**
-
-CAUTION: **Caution:**
-This API is in an alpha stage and considered unstable.
-The response payload may be subject to change or breakage
-across GitLab releases.
-
-## List related issues
-
-Get a list of related issues of a given issue, sorted by the relationship creation datetime (ascending).
-Issues will be filtered according to the user authorizations.
-
-```plaintext
-GET /projects/:id/issues/:issue_iid/links
-```
-
-Parameters:
-
-| Attribute | Type | Required | Description |
-|-------------|---------|----------|--------------------------------------|
-| `id` | integer or string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
-| `issue_iid` | integer | yes | The internal ID of a project's issue |
-
-```json
-[
- {
- "id" : 84,
- "iid" : 14,
- "issue_link_id": 1
- "project_id" : 4,
- "created_at" : "2016-01-07T12:44:33.959Z",
- "title" : "Issues with auth",
- "state" : "opened",
- "assignees" : [],
- "assignee" : null,
- "labels" : [
- "bug"
- ],
- "author" : {
- "name" : "Alexandra Bashirian",
- "avatar_url" : null,
- "state" : "active",
- "web_url" : "https://gitlab.example.com/eileen.lowe",
- "id" : 18,
- "username" : "eileen.lowe"
- },
- "description" : null,
- "updated_at" : "2016-01-07T12:44:33.959Z",
- "milestone" : null,
- "subscribed" : true,
- "user_notes_count": 0,
- "due_date": null,
- "web_url": "http://example.com/example/example/issues/14",
- "confidential": false,
- "weight": null,
- }
-]
-```
-
-## Create an issue link
-
-Creates a two-way relation between two issues. User must be allowed to update both issues in order to succeed.
-
-```plaintext
-POST /projects/:id/issues/:issue_iid/links
-```
-
-| Attribute | Type | Required | Description |
-|-------------|---------|----------|--------------------------------------|
-| `id` | integer or string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
-| `issue_iid` | integer | yes | The internal ID of a project's issue |
-| `target_project_id` | integer or string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) of a target project |
-| `target_issue_iid` | integer or string | yes | The internal ID of a target project's issue |
-
-```json
-{
- "source_issue" : {
- "id" : 83,
- "iid" : 11,
- "project_id" : 4,
- "created_at" : "2016-01-07T12:44:33.959Z",
- "title" : "Issues with auth",
- "state" : "opened",
- "assignees" : [],
- "assignee" : null,
- "labels" : [
- "bug"
- ],
- "author" : {
- "name" : "Alexandra Bashirian",
- "avatar_url" : null,
- "state" : "active",
- "web_url" : "https://gitlab.example.com/eileen.lowe",
- "id" : 18,
- "username" : "eileen.lowe"
- },
- "description" : null,
- "updated_at" : "2016-01-07T12:44:33.959Z",
- "milestone" : null,
- "subscribed" : true,
- "user_notes_count": 0,
- "due_date": null,
- "web_url": "http://example.com/example/example/issues/11",
- "confidential": false,
- "weight": null,
- },
- "target_issue" : {
- "id" : 84,
- "iid" : 14,
- "project_id" : 4,
- "created_at" : "2016-01-07T12:44:33.959Z",
- "title" : "Issues with auth",
- "state" : "opened",
- "assignees" : [],
- "assignee" : null,
- "labels" : [
- "bug"
- ],
- "author" : {
- "name" : "Alexandra Bashirian",
- "avatar_url" : null,
- "state" : "active",
- "web_url" : "https://gitlab.example.com/eileen.lowe",
- "id" : 18,
- "username" : "eileen.lowe"
- },
- "description" : null,
- "updated_at" : "2016-01-07T12:44:33.959Z",
- "milestone" : null,
- "subscribed" : true,
- "user_notes_count": 0,
- "due_date": null,
- "web_url": "http://example.com/example/example/issues/14",
- "confidential": false,
- "weight": null,
- }
-}
-```
-
-## Delete an issue link
-
-Deletes an issue link, removing the two-way relationship.
-
-```plaintext
-DELETE /projects/:id/issues/:issue_iid/links/:issue_link_id
-```
-
-| Attribute | Type | Required | Description |
-|-------------|---------|----------|--------------------------------------|
-| `id` | integer or string | yes | The ID or [URL-encoded path of the project](README.md#namespaced-path-encoding) owned by the authenticated user |
-| `issue_iid` | integer | yes | The internal ID of a project's issue |
-| `issue_link_id` | integer or string | yes | The ID of an issue relationship |
-
-```json
-{
- "source_issue" : {
- "id" : 83,
- "iid" : 11,
- "project_id" : 4,
- "created_at" : "2016-01-07T12:44:33.959Z",
- "title" : "Issues with auth",
- "state" : "opened",
- "assignees" : [],
- "assignee" : null,
- "labels" : [
- "bug"
- ],
- "author" : {
- "name" : "Alexandra Bashirian",
- "avatar_url" : null,
- "state" : "active",
- "web_url" : "https://gitlab.example.com/eileen.lowe",
- "id" : 18,
- "username" : "eileen.lowe"
- },
- "description" : null,
- "updated_at" : "2016-01-07T12:44:33.959Z",
- "milestone" : null,
- "subscribed" : true,
- "user_notes_count": 0,
- "due_date": null,
- "web_url": "http://example.com/example/example/issues/11",
- "confidential": false,
- "weight": null,
- },
- "target_issue" : {
- "id" : 84,
- "iid" : 14,
- "project_id" : 4,
- "created_at" : "2016-01-07T12:44:33.959Z",
- "title" : "Issues with auth",
- "state" : "opened",
- "assignees" : [],
- "assignee" : null,
- "labels" : [
- "bug"
- ],
- "author" : {
- "name" : "Alexandra Bashirian",
- "avatar_url" : null,
- "state" : "active",
- "web_url" : "https://gitlab.example.com/eileen.lowe",
- "id" : 18,
- "username" : "eileen.lowe"
- },
- "description" : null,
- "updated_at" : "2016-01-07T12:44:33.959Z",
- "milestone" : null,
- "subscribed" : true,
- "user_notes_count": 0,
- "due_date": null,
- "web_url": "http://example.com/example/example/issues/14",
- "confidential": false,
- "weight": null,
- }
-}
-```
diff --git a/doc/ci/docker/using_docker_build.md b/doc/ci/docker/using_docker_build.md
index 69618cbd218..acdc61d008f 100644
--- a/doc/ci/docker/using_docker_build.md
+++ b/doc/ci/docker/using_docker_build.md
@@ -120,7 +120,7 @@ not without its own challenges:
- By default, Docker 17.09 and higher uses `--storage-driver overlay2` which is
the recommended storage driver. See [Using the overlayfs driver](#using-the-overlayfs-driver)
for details.
-- Since the `docker:19.03.1-dind` container and the Runner container don't share their
+- Since the `docker:19.03.8-dind` container and the Runner container don't share their
root filesystem, the job's working directory can be used as a mount point for
child containers. For example, if you have files you want to share with a
child container, you may create a subdirectory under `/builds/$CI_PROJECT_PATH`
@@ -139,7 +139,7 @@ not without its own challenges:
An example project using this approach can be found here: <https://gitlab.com/gitlab-examples/docker>.
In the examples below, we are using Docker images tags to specify a
-specific version, such as `docker:19.03.1`. If tags like `docker:stable`
+specific version, such as `docker:19.03.8`. If tags like `docker:stable`
are used, you have no control over what version is going to be used and this
can lead to unpredictable behavior, especially when new versions are
released.
@@ -150,7 +150,7 @@ NOTE: **Note**
This requires GitLab Runner 11.11 or higher.
The Docker daemon supports connection over TLS and it's done by default
-for Docker 19.03.1 or higher. This is the **suggested** way to use the
+for Docker 19.03.8 or higher. This is the **suggested** way to use the
docker-in-docker service and
[GitLab.com Shared Runners](../../user/gitlab_com/index.md#shared-runners)
support this.
@@ -166,13 +166,13 @@ support this.
--registration-token REGISTRATION_TOKEN \
--executor docker \
--description "My Docker Runner" \
- --docker-image "docker:19.03.1" \
+ --docker-image "docker:19.03.8" \
--docker-privileged \
--docker-volumes "/certs/client"
```
The above command will register a new Runner to use the special
- `docker:19.03.1` image, which is provided by Docker. **Notice that it's
+ `docker:19.03.8` image, which is provided by Docker. **Notice that it's
using the `privileged` mode to start the build and service
containers.** If you want to use [docker-in-docker](https://www.docker.com/blog/docker-can-now-run-within-docker/) mode, you always
have to use `privileged = true` in your Docker containers.
@@ -191,7 +191,7 @@ support this.
executor = "docker"
[runners.docker]
tls_verify = false
- image = "docker:19.03.1"
+ image = "docker:19.03.8"
privileged = true
disable_cache = false
volumes = ["/certs/client", "/cache"]
@@ -201,18 +201,18 @@ support this.
```
1. You can now use `docker` in the build script (note the inclusion of the
- `docker:19.03.1-dind` service):
+ `docker:19.03.8-dind` service):
```yaml
- image: docker:19.03.1
+ image: docker:19.03.8
variables:
# When using dind service, we need to instruct docker, to talk with
# the daemon started inside of the service. The daemon is available
# with a network connection instead of the default
- # /var/run/docker.sock socket. docker:19.03.1 does this automatically
+ # /var/run/docker.sock socket. Docker 19.03 does this automatically
# by setting the DOCKER_HOST in
- # https://github.com/docker-library/docker/blob/d45051476babc297257df490d22cbd806f1b11e4/19.03.1/docker-entrypoint.sh#L23-L29
+ # https://github.com/docker-library/docker/blob/d45051476babc297257df490d22cbd806f1b11e4/19.03/docker-entrypoint.sh#L23-L29
#
# The 'docker' hostname is the alias of the service container as described at
# https://docs.gitlab.com/ee/ci/docker/using_docker_images.html#accessing-the-services.
@@ -229,7 +229,7 @@ support this.
DOCKER_TLS_CERTDIR: "/certs"
services:
- - docker:19.03.1-dind
+ - docker:19.03.8-dind
before_script:
- docker info
@@ -256,7 +256,7 @@ Assuming that the Runner `config.toml` is similar to:
executor = "docker"
[runners.docker]
tls_verify = false
- image = "docker:19.03.1"
+ image = "docker:19.03.8"
privileged = true
disable_cache = false
volumes = ["/cache"]
@@ -266,10 +266,10 @@ Assuming that the Runner `config.toml` is similar to:
```
You can now use `docker` in the build script (note the inclusion of the
-`docker:19.03.1-dind` service):
+`docker:19.03.8-dind` service):
```yaml
-image: docker:19.03.1
+image: docker:19.03.8
variables:
# When using dind service we need to instruct docker, to talk with the
@@ -290,7 +290,7 @@ variables:
DOCKER_TLS_CERTDIR: ""
services:
- - docker:19.03.1-dind
+ - docker:19.03.8-dind
before_script:
- docker info
@@ -310,7 +310,7 @@ container so that Docker is available in the context of that image.
NOTE: **Note:**
If you bind the Docker socket [when using GitLab Runner 11.11 or
newer](https://gitlab.com/gitlab-org/gitlab-runner/-/merge_requests/1261),
-you can no longer use `docker:19.03.1-dind` as a service because volume bindings
+you can no longer use `docker:19.03.8-dind` as a service because volume bindings
are done to the services as well, making these incompatible.
In order to do that, follow the steps:
@@ -325,12 +325,12 @@ In order to do that, follow the steps:
--registration-token REGISTRATION_TOKEN \
--executor docker \
--description "My Docker Runner" \
- --docker-image "docker:19.03.1" \
+ --docker-image "docker:19.03.8" \
--docker-volumes /var/run/docker.sock:/var/run/docker.sock
```
The above command will register a new Runner to use the special
- `docker:19.03.1` image which is provided by Docker. **Notice that it's using
+ `docker:19.03.8` image which is provided by Docker. **Notice that it's using
the Docker daemon of the Runner itself, and any containers spawned by Docker
commands will be siblings of the Runner rather than children of the Runner.**
This may have complications and limitations that are unsuitable for your workflow.
@@ -344,7 +344,7 @@ In order to do that, follow the steps:
executor = "docker"
[runners.docker]
tls_verify = false
- image = "docker:19.03.1"
+ image = "docker:19.03.8"
privileged = false
disable_cache = false
volumes = ["/var/run/docker.sock:/var/run/docker.sock", "/cache"]
@@ -353,11 +353,11 @@ In order to do that, follow the steps:
```
1. You can now use `docker` in the build script (note that you don't need to
- include the `docker:19.03.1-dind` service as when using the Docker in Docker
+ include the `docker:19.03.8-dind` service as when using the Docker in Docker
executor):
```yaml
- image: docker:19.03.1
+ image: docker:19.03.8
before_script:
- docker info
@@ -411,10 +411,10 @@ any image that's used with the `--cache-from` argument must first be pulled
Here's a `.gitlab-ci.yml` file showing how Docker caching can be used:
```yaml
-image: docker:19.03.1
+image: docker:19.03.8
services:
- - docker:19.03.1-dind
+ - docker:19.03.8-dind
variables:
# Use TLS https://docs.gitlab.com/ee/ci/docker/using_docker_build.html#tls-enabled
diff --git a/doc/ci/merge_request_pipelines/index.md b/doc/ci/merge_request_pipelines/index.md
index 315d552e5d2..b57340347d2 100644
--- a/doc/ci/merge_request_pipelines/index.md
+++ b/doc/ci/merge_request_pipelines/index.md
@@ -7,9 +7,8 @@ last_update: 2019-07-03
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/issues/15310) in GitLab 11.6.
-In a [basic configuration](../pipelines/pipeline_architectures.md), GitLab runs a pipeline each time
-changes are pushed to a branch. The settings in the [`.gitlab-ci.yml`](../yaml/README.md)
-file, including `rules`, `only`, and `except`, determine which jobs are added to a pipeline.
+In a [basic configuration](../pipelines/pipeline_architectures.md#basic-pipelines), GitLab runs a pipeline each time
+changes are pushed to a branch.
If you want the pipeline to run jobs **only** when merge requests are created or updated,
you can use *pipelines for merge requests*.
@@ -28,7 +27,7 @@ A few notes:
## Configuring pipelines for merge requests
-To configure pipelines for merge requests, configure your CI yaml file.
+To configure pipelines for merge requests, configure your [CI/CD configuration file](../yaml/README.md).
There are a few different ways to do this.
### Enable pipelines for merge requests for all jobs
diff --git a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/index.md b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/index.md
index 5a732248ec1..fb5c7830ac2 100644
--- a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/index.md
+++ b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/index.md
@@ -36,31 +36,41 @@ again run against the merged results.
## Requirements and limitations
-Pipelines for merged results require a [GitLab Runner][runner] 11.9 or newer.
-
-[runner]: https://gitlab.com/gitlab-org/gitlab-runner
-
-In addition, pipelines for merged results have the following limitations:
+Pipelines for merged results have the following requirements and limitations:
+- Pipelines for merged results require [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner) 11.9 or newer.
- Forking/cross-repo workflows are not currently supported. To follow progress,
see [#11934](https://gitlab.com/gitlab-org/gitlab/issues/11934).
- This feature is not available for
[fast forward merges](../../../user/project/merge_requests/fast_forward_merge.md) yet.
To follow progress, see [#58226](https://gitlab.com/gitlab-org/gitlab/-/issues/26996).
-## Enabling Pipelines for Merged Results
+## Enable pipelines for merged results
-To enable pipelines on merged results at the project level:
+To enable pipelines for merged results for your project:
+1. [Configure your CI/CD configuration file](../index.md#configuring-pipelines-for-merge-requests)
+ so that the pipeline or individual jobs run for merge requests.
1. Visit your project's **Settings > General** and expand **Merge requests**.
1. Check **Merge pipelines will try to validate the post-merge result prior to merging**.
-1. Click **Save changes** button.
+1. Click **Save changes**.
+
+CAUTION: **Caution:**
+If you select the check box but don't configure your CI/CD to use
+pipelines for merge requests, your merge requests may become stuck in an
+unresolved state or your pipelines may be dropped.
+
+## Using Merge Trains
-![Merge request pipeline config](img/merge_request_pipeline_config.png)
+When you enable [Pipelines for merged results](#pipelines-for-merged-results-premium),
+GitLab [automatically displays](merge_trains/index.md#add-a-merge-request-to-a-merge-train)
+a **Start/Add Merge Train button**.
-CAUTION: **Warning:**
-Make sure your `gitlab-ci.yml` file is [configured properly for pipelines for merge requests](../index.md#configuring-pipelines-for-merge-requests),
-otherwise pipelines for merged results won't run and your merge requests will be stuck in an unresolved state.
+Generally, this is a safer option than merging merge requests immediately, because your
+merge request will be evaluated with an expected post-merge result before the actual
+merge happens.
+
+For more information, read the [documentation on Merge Trains](merge_trains/index.md).
## Automatic pipeline cancelation
@@ -118,15 +128,3 @@ which indicates that the checkout-SHA is not found in the merge ref.
This behavior was improved in GitLab 12.4 by introducing [Persistent pipeline refs](../../pipelines/index.md#troubleshooting-fatal-reference-is-not-a-tree).
You should be able to create pipelines at any time without encountering this error.
-
-## Using Merge Trains **(PREMIUM)**
-
-By enabling [Pipelines for merged results](#pipelines-for-merged-results-premium),
-GitLab will [automatically display](merge_trains/index.md#how-to-add-a-merge-request-to-a-merge-train)
-a **Start/Add Merge Train button** as the most recommended merge strategy.
-
-Generally, this is a safer option than merging merge requests immediately as your
-merge request will be evaluated with an expected post-merge result before the actual
-merge happens.
-
-For more information, read the [documentation on Merge Trains](merge_trains/index.md).
diff --git a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/img/merge_train_immediate_merge_confirmation_dialog_v12_6.png b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/img/merge_train_immediate_merge_confirmation_dialog_v12_6.png
deleted file mode 100644
index 241f837a748..00000000000
--- a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/img/merge_train_immediate_merge_confirmation_dialog_v12_6.png
+++ /dev/null
Binary files differ
diff --git a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/img/merge_train_immediate_merge_v12_6.png b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/img/merge_train_immediate_merge_v12_6.png
index b46522395e0..de5897c271b 100644
--- a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/img/merge_train_immediate_merge_v12_6.png
+++ b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/img/merge_train_immediate_merge_v12_6.png
Binary files differ
diff --git a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md
index 1859328247b..b6706c2a272 100644
--- a/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md
+++ b/doc/ci/merge_request_pipelines/pipelines_for_merged_results/merge_trains/index.md
@@ -19,21 +19,18 @@ Merging now could introduce breaking changes.
*Merge trains* can prevent this from happening. A merge train is a queued list of merge
requests, each waiting to be merged into the target branch.
-Each merge request on the train runs the merged results pipeline immediately before its
-changes are merged into the target branch. If the pipeline fails, the breaking changes are
-not merged, and the target branch is unaffected.
+Many merge requests can be added to the train. Each merge request runs its own merged results pipeline,
+which includes the changes from all of the other merge requests in *front* of it on the train.
+All the pipelines run in parallel, to save time.
-Many merge requests can be added to the train. Each is trying to merge into the target branch.
-Each request runs its own merged results pipeline, which includes the changes from
-all of the other merge requests in *front* of it on the train. All the pipelines run
-in parallel, to save time.
+If the pipeline for a merge request fails, the breaking changes are not merged, and the target
+branch is unaffected. The merge request is removed from the train, and all pipelines behind it restart.
If the pipeline for the merge request at the front of the train completes successfully,
-the changes are merged into the target branch, and the other pipelines will continue to
+the changes are merged into the target branch, and the other pipelines continue to
run.
-If one of the pipelines fails, it is removed from the train, and all pipelines behind
-it restart, but without the changes that were removed.
+## Merge train example
Three merge requests (`A`, `B` and `C`) are added to a merge train in order, which
creates three merged results pipelines that run in parallel:
@@ -50,85 +47,99 @@ to run. If more merge requests are added to the train, they will now include the
changes that are included in the target branch, and the `C` changes that are from
the merge request already in the train.
-Learn more about
+Read more about
[how merge trains keep your master green](https://about.gitlab.com/blog/2020/01/30/all-aboard-merge-trains/).
+<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
+Watch this video for a demonstration on [how parallel execution
+of Merge Trains can prevent commits from breaking the default
+branch](https://www.youtube.com/watch?v=D4qCqXgZkHQ).
+
## Requirements and limitations
Merge trains have the following requirements and limitations:
+- Merge trains require [GitLab Runner](https://gitlab.com/gitlab-org/gitlab-runner) 11.9 or newer.
- GitLab 12.0 and later requires [Redis](https://redis.io/) 3.2 or higher.
-- [Pipelines for merged results](../index.md#pipelines-for-merged-results-premium) must be
- **configured properly**.
- Each merge train can run a maximum of **twenty** pipelines in parallel.
If more than twenty merge requests are added to the merge train, the merge requests
will be queued until a slot in the merge train is free. There is no limit to the
number of merge requests that can be queued.
-<i class="fa fa-youtube-play youtube" aria-hidden="true"></i>
-Watch this video for a demonstration on [how parallel execution
-of Merge Trains can prevent commits from breaking the default
-branch](https://www.youtube.com/watch?v=D4qCqXgZkHQ).
+## Enable merge trains
-## How to add a merge request to a merge train
+To enable merge trains for your project:
-To add a merge request to a merge train:
+1. If you are on a self-managed GitLab instance, ensure the [feature flag](#merge-trains-feature-flag-premium-only) is set correctly.
+1. [Configure your CI/CD configuration file](../../index.md#configuring-pipelines-for-merge-requests)
+ so that the pipeline or individual jobs run for merge requests.
+1. Visit your project's **Settings > General** and expand **Merge requests**.
+1. Check **Merge pipelines will try to validate the post-merge result prior to merging**.
+1. Click **Save changes**.
+
+CAUTION: **Caution:**
+If you select the check box but don't configure your CI/CD to use
+pipelines for merge requests, your merge requests may become stuck in an
+unresolved state or your pipelines may be dropped.
+
+## Start a merge train
+
+To start a merge train:
1. Visit a merge request.
-1. Click the **Start/Add to merge train** button.
+1. Click the **Start merge train** button.
![Start merge train](img/merge_train_start_v12_0.png)
-## How to remove a merge request from a merge train
+Other merge requests can now be added to the train.
+
+## Add a merge request to a merge train
+
+To add a merge request to a merge train:
1. Visit a merge request.
-1. Click the **Remove from merge train** button.
+1. Click the **Add to merge train** button.
-![Cancel merge train](img/merge_train_cancel_v12_0.png)
+If pipelines are already running for the merge request, you cannot add the merge request
+to the train. Instead, you can schedule the merge request to be added to the merge train **when the latest
+pipeline succeeds**.
-## How to view a merge request's current position on the merge train
+![Add to merge train when pipeline succeeds](img/merge_train_start_when_pipeline_succeeds_v12_0.png)
-After a merge request has been added to the merge train, the merge request's
-current position will be displayed under the pipeline widget:
+## Remove a merge request from a merge train
-![Merge train position indicator](img/merge_train_position_v12_0.png)
+1. Visit a merge request.
+1. Click the **Remove from merge train** button.
-## Start/Add to merge train when pipeline succeeds
+![Cancel merge train](img/merge_train_cancel_v12_0.png)
-You can add a merge request to a merge train only when the latest pipeline in the
-merge request is finished. While the pipeline is running or pending, you cannot add
-the merge request to a train because the current change of the merge request may
-be broken thus it could affect the following merge requests.
+If you want to add the merge request to a merge train again later, you can.
-In this case, you can schedule to add the merge request to a merge train **when the latest
-pipeline succeeds** (This pipeline is [Pipelines for merged results](../index.md), not Pipelines for merge train).
-You can see the following button instead of the regular **Start/Add to merge train**
-button while the latest pipeline is running.
+## View a merge request's current position on the merge train
-![Add to merge train when pipeline succeeds](img/merge_train_start_when_pipeline_succeeds_v12_0.png)
+After a merge request has been added to the merge train, the merge request's
+current position is displayed under the pipeline widget:
+
+![Merge train position indicator](img/merge_train_position_v12_0.png)
## Immediately merge a merge request with a merge train
-In the case where you have a high-priority merge request (for example, a critical patch) to be merged urgently,
-you can use **Merge Immediately** option for bypassing the merge train.
+If you have a high-priority merge request (for example, a critical patch) that must
+be merged urgently, you can bypass the merge train by using the **Merge Immediately** option.
This is the fastest option to get the change merged into the target branch.
![Merge Immediately](img/merge_train_immediate_merge_v12_6.png)
-However, every time you merge a merge request immediately, it could affect the
-existing merge train to be reconstructed, specifically, it regenerates expected
-merge commits and pipelines. This means, merging immediately essentially wastes
-CI resources. Because of these downsides, you will be asked to confirm before
-the merge is initiated:
-
-![Merge immediately confirmation dialog](img/merge_train_immediate_merge_confirmation_dialog_v12_6.png)
+CAUTION: **Caution:**
+Each time you merge a merge request immediately, the current merge train
+is recreated and all pipelines restart.
## Troubleshooting
### Merge request dropped from the merge train immediately
If a merge request is not mergeable (for example, it's WIP, there is a merge
-conflict, etc), your merge request will be dropped from the merge train automatically.
+conflict, etc.), your merge request will be dropped from the merge train automatically.
In these cases, the reason for dropping the merge request is in the **system notes**.
@@ -146,35 +157,30 @@ To check the reason:
### Merge When Pipeline Succeeds cannot be chosen
[Merge When Pipeline Succeeds](../../../../user/project/merge_requests/merge_when_pipeline_succeeds.md)
-is unavailable when
-[Pipelines for Merged Results is enabled](../index.md#enabling-pipelines-for-merged-results).
+is currently unavailable when Merge Trains are enabled.
-Follow [this issue](https://gitlab.com/gitlab-org/gitlab/issues/12267) to
-track progress on this issue.
+See [the related issue](https://gitlab.com/gitlab-org/gitlab/issues/12267)
+for more information.
### Merge Train Pipeline cannot be retried
A Merge Train pipeline cannot be retried because the merge request is dropped from the merge train upon failure. For this reason, the retry button does not appear next to the pipeline icon.
-In the case of pipeline failure, you should [re-enqueue](#how-to-add-a-merge-request-to-a-merge-train) the merge request to the merge train, which will then initiate a new pipeline.
-
-### Merge Train disturbs your workflow
+In the case of pipeline failure, you should [re-enqueue](#add-a-merge-request-to-a-merge-train) the merge request to the merge train, which will then initiate a new pipeline.
-First of all, please check if [merge immediately](#immediately-merge-a-merge-request-with-a-merge-train)
-is available as a workaround in your workflow. This is the most recommended
-workaround you'd be able to take immediately. If it's not available or acceptable,
-please read through this section.
+### Merge Trains feature flag **(PREMIUM ONLY)**
-Merge train is enabled by default when you enable [Pipelines for merged results](../index.md),
-however, you can disable this feature by setting the `:disable_merge_trains` feature flag to `enable`.
-When you disable this feature, all existing merge trains are aborted and
-the **Start/Add Merge Train** button no longer appears in merge requests.
+To enable and disable the Merge Trains feature, use the `:disable_merge_trains` feature flag.
To check if the feature flag is enabled on your GitLab instance,
-please ask an administrator to execute the following commands **(CORE ONLY)**:
+ask an administrator to execute the following commands:
```shell
> sudo gitlab-rails console # Login to Rails console of GitLab instance.
> Feature.enabled?(:disable_merge_trains) # Check if it's disabled or not.
> Feature.enable(:disable_merge_trains) # Disable Merge Trains.
+> Feature.disable(:disable_merge_trains) # Enable Merge Trains.
```
+
+When you disable this feature, all existing merge trains are cancelled and
+the **Start/Add to Merge Train** button no longer appears in merge requests.
diff --git a/doc/development/README.md b/doc/development/README.md
index 0620c96ba21..b505fa38618 100644
--- a/doc/development/README.md
+++ b/doc/development/README.md
@@ -34,7 +34,7 @@ description: 'Learn how to contribute to GitLab.'
- [Code review guidelines](code_review.md) for reviewing code and having code reviewed
- [Database review guidelines](database_review.md) for reviewing database-related changes and complex SQL queries, and having them reviewed
-- [Secure coding guidelines](https://gitlab.com/gitlab-com/gl-security/security-guidelines)
+- [Secure coding guidelines](secure_coding_guidelines.md)
- [Pipelines for the GitLab project](pipelines.md)
Complementary reads:
diff --git a/doc/development/api_graphql_styleguide.md b/doc/development/api_graphql_styleguide.md
index ea58b71a804..036eddd7c37 100644
--- a/doc/development/api_graphql_styleguide.md
+++ b/doc/development/api_graphql_styleguide.md
@@ -211,6 +211,15 @@ To ensure that we get consistent ordering, we will append an ordering on the pri
key, in descending order. This is usually `id`, so basically we will add `order(id: :desc)`
to the end of the relation. A primary key _must_ be available on the underlying table.
+#### Shortcut fields
+
+Sometimes it can seem easy to implement a "shortcut field", where the resolver returns the first item of a collection if no parameters are passed.
+These "shortcut fields" are discouraged because they create maintenance overhead.
+They need to be kept in sync with their canonical field, and deprecated or modified if their canonical field changes.
+Use the functionality the framework provides unless there is a compelling reason to do otherwise.
+
+For example, instead of `latest_pipeline`, use `pipelines(last: 1)`.
+
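For instance, a client can already fetch only the most recent pipeline through the standard connection arguments; a rough `curl` sketch (host, token, and project path are placeholders):

```shell
# Placeholder host, token, and project path; fetches only the newest pipeline.
curl --request POST "https://gitlab.example.com/api/graphql" \
  --header "Authorization: Bearer <your_access_token>" \
  --header "Content-Type: application/json" \
  --data '{"query": "{ project(fullPath: \"my-group/my-project\") { pipelines(last: 1) { nodes { id status } } } }"}'
```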
### Exposing permissions for a type
To expose permissions the current user has on a resource, you can call
diff --git a/doc/development/code_review.md b/doc/development/code_review.md
index c480db54705..52a0672259f 100644
--- a/doc/development/code_review.md
+++ b/doc/development/code_review.md
@@ -13,9 +13,13 @@ You are strongly encouraged to get your code **reviewed** by a
[reviewer](https://about.gitlab.com/handbook/engineering/workflow/code-review/#reviewer) as soon as
there is any code to review, to get a second opinion on the chosen solution and
implementation, and an extra pair of eyes looking for bugs, logic problems, or
-uncovered edge cases. The reviewer can be from a different team, but it is
-recommended to pick someone who knows the domain well. You can read more about the
-importance of involving reviewer(s) in the section on the responsibility of the author below.
+uncovered edge cases.
+
+The default approach is to choose a reviewer from your group or team for the first review.
+This is only a recommendation and the reviewer may be from a different team.
+However, it is recommended to pick someone who is a [domain expert](#domain-experts).
+
+You can read more about the importance of involving reviewer(s) in the section on the responsibility of the author below.
If you need some guidance (for example, it's your first merge request), feel free to ask
one of the [Merge request coaches](https://about.gitlab.com/company/team/).
@@ -32,16 +36,32 @@ widget. Reviewers can add their approval by [approving additionally](../user/pro
Getting your merge request **merged** also requires a maintainer. If it requires
more than one approval, the last maintainer to review and approve it will also merge it.
+### Domain experts
+
+Domain experts are team members who have substantial experience with a specific technology, product feature, or area of the codebase. Team members are encouraged to self-identify as domain experts and add this to their [team profile](https://gitlab.com/gitlab-com/www-gitlab-com/-/blob/master/data/team.yml).
+
+When self-identifying as a domain expert, it is recommended to assign the MR that changes `team.yml` to an already established domain expert or the corresponding Engineering Manager for merging.
+
+We make the following assumptions about who is automatically considered a domain expert:
+
+- Team members working in a specific stage/group (for example, Create: Source Code) are considered domain experts for the area of the application they work on.
+- Team members working on a specific feature (for example, search) are considered domain experts for that feature.
+
+We default to assigning reviews to team members with domain expertise.
+When a suitable [domain expert](#domain-experts) isn't available, you can choose any team member to review the MR, or simply follow the [Reviewer roulette](#reviewer-roulette) recommendation.
+
+Team members' domain expertise can be viewed on the [engineering projects](https://about.gitlab.com/handbook/engineering/projects/) page or on the [GitLab team page](https://about.gitlab.com/company/team/).
+
### Reviewer roulette
The [Danger bot](dangerbot.md) randomly picks a reviewer and a maintainer for
each area of the codebase that your merge request seems to touch. It only makes
-recommendations - feel free to override it if you think someone else is a better
+**recommendations** and you should override it if you think someone else is a better
fit!
It picks reviewers and maintainers from the list at the
[engineering projects](https://about.gitlab.com/handbook/engineering/projects/)
-page, with these behaviours:
+page, with these behaviors:
1. It will not pick people whose [GitLab status](../user/profile/index.md#current-status)
contains the string 'OOO'.
@@ -56,7 +76,7 @@ page, with these behaviours:
As described in the section on the responsibility of the maintainer below, you
are recommended to get your merge request approved and merged by maintainer(s)
-from teams other than your own.
+with [domain expertise](#domain-experts).
1. If your merge request includes backend changes [^1], it must be
**approved by a [backend maintainer](https://about.gitlab.com/handbook/engineering/projects/#gitlab_maintainers_backend)**.
@@ -103,13 +123,13 @@ To reach the required level of confidence in their solution, an author is expect
to involve other people in the investigation and implementation processes as
appropriate.
-They are encouraged to reach out to domain experts to discuss different solutions
+They are encouraged to reach out to [domain experts](#domain-experts) to discuss different solutions
or get an implementation reviewed, to product managers and UX designers to clear
up confusion or verify that the end result matches what they had in mind, to
database specialists to get input on the data model or specific queries, or to
any other developer to get an in-depth review of the solution.
-If an author is unsure if a merge request needs a domain expert's opinion, that's
+If an author is unsure if a merge request needs a [domain expert's](#domain-experts) opinion, that's
usually a pretty good sign that it does, since without it the required level of
confidence in their solution will not have been reached.
@@ -142,9 +162,8 @@ that it meets all requirements, you should:
- Click the Approve button.
- Advise the author their merge request has been reviewed and approved.
-- Assign the merge request to a maintainer. [Reviewer roulette](#reviewer-roulette)
-should have made a suggestion, but feel free to override if someone else is a
-better choice.
+- Assign the merge request to a maintainer. Default to assigning it to a maintainer with [domain expertise](#domain-experts).
+However, if one isn't available, or if you think the merge request doesn't need a review by a [domain expert](#domain-experts), feel free to follow the [Reviewer roulette](#reviewer-roulette) suggestion.
### The responsibility of the maintainer
@@ -159,20 +178,17 @@ Since a maintainer's job only depends on their knowledge of the overall GitLab
codebase, and not that of any specific domain, they can review, approve, and merge
merge requests from any team and in any product area.
-In fact, authors are encouraged to get their merge requests merged by maintainers
-from teams other than their own, to ensure that all code across GitLab is consistent
-and can be easily understood by all contributors, from both inside and outside the
-company, without requiring team-specific expertise.
-
Maintainers will do their best to also review the specifics of the chosen solution
-before merging, but as they are not necessarily domain experts, they may be poorly
+before merging, but as they are not necessarily [domain experts](#domain-experts), they may be poorly
placed to do so without an unreasonable investment of time. In those cases, they
-will defer to the judgment of the author and earlier reviewers and involved domain
-experts, in favor of focusing on their primary responsibilities.
+will defer to the judgment of the author and earlier reviewers, in favor of focusing on their primary responsibilities.
+
+If a maintainer feels that an MR is substantial enough that it warrants a review from a [domain expert](#domain-experts),
+and it is unclear whether a domain expert has been involved in the reviews to date,
+they may request a [domain expert's](#domain-experts) review before merging the MR.
If a developer who happens to also be a maintainer was involved in a merge request
-as a domain expert and/or reviewer, it is recommended that they are not also picked
-as the maintainer to ultimately approve and merge it.
+as a reviewer, it is recommended that they are not also picked as the maintainer to ultimately approve and merge it.
Maintainers should check before merging if the merge request is approved by the
required approvers.
@@ -255,11 +271,13 @@ first time.
### Assigning a merge request for a review
-If you want to have your merge request reviewed, you can assign it to any reviewer. The list of reviewers can be found on [Engineering projects](https://about.gitlab.com/handbook/engineering/projects/) page.
+When you are ready to have your merge request reviewed,
+you should default to assigning it to a reviewer from your group or team for the first review.
+However, you can also assign it to any reviewer. The list of reviewers can be found on the [Engineering projects](https://about.gitlab.com/handbook/engineering/projects/) page.
You can also use `workflow::ready for review` label. That means that your merge request is ready to be reviewed and any reviewer can pick it. It is recommended to use that label only if there isn't time pressure and make sure the merge request is assigned to a reviewer.
-When your merge request was reviewed and can be passed to a maintainer you can either pick a specific maintainer or use a label `ready for merge`.
+When your merge request has been reviewed and can be passed to a maintainer, you should default to choosing a maintainer with [domain expertise](#domain-experts), and otherwise follow the Reviewer Roulette recommendation or use the label `ready for merge`.
It is responsibility of the author of a merge request that the merge request is reviewed. If it stays in `ready for review` state too long it is recommended to assign it to a specific reviewer.
diff --git a/doc/development/documentation/site_architecture/index.md b/doc/development/documentation/site_architecture/index.md
index bd870399978..56dd3821b1c 100644
--- a/doc/development/documentation/site_architecture/index.md
+++ b/doc/development/documentation/site_architecture/index.md
@@ -20,29 +20,27 @@ from where content is sourced, the `gitlab-docs` project, and the published outp
```mermaid
graph LR
- A[gitlab-foss/doc]
- B[gitlab/doc]
- C[gitlab-runner/docs]
- D[omnibus-gitlab/doc]
- E[charts/doc]
- F[gitlab-docs]
- A --> F
- B --> F
- C --> F
- D --> F
- E --> F
- F -- Build pipeline --> G
- G[docs.gitlab.com]
- H[/ce/]
- I[/ee/]
- J[/runner/]
- K[/omnibus/]
- L[/charts/]
- G --> H
- G --> I
- G --> J
- G --> K
- G --> L
+ A[gitlab/doc]
+ B[gitlab-runner/docs]
+ C[omnibus-gitlab/doc]
+ D[charts/doc]
+ E[gitlab-docs]
+ A --> E
+ B --> E
+ C --> E
+ D --> E
+ E -- Build pipeline --> F
+ F[docs.gitlab.com]
+ G[/ce/]
+ H[/ee/]
+ I[/runner/]
+ J[/omnibus/]
+ K[/charts/]
+ F --> H
+ F --> I
+ F --> J
+ F --> K
+ H -- symlink --> G
```
You will not find any GitLab docs content in the `gitlab-docs` repository.
diff --git a/doc/development/elasticsearch.md b/doc/development/elasticsearch.md
index feff0ba7c8a..758cecce315 100644
--- a/doc/development/elasticsearch.md
+++ b/doc/development/elasticsearch.md
@@ -7,13 +7,7 @@ the [Elasticsearch integration documentation](../integration/elasticsearch.md#en
## Deep Dive
-In June 2019, Mario de la Ossa hosted a [Deep Dive] on GitLab's [Elasticsearch integration] to share his domain specific knowledge with anyone who may work in this part of the code base in the future. You can find the [recording on YouTube], and the slides on [Google Slides] and in [PDF]. Everything covered in this deep dive was accurate as of GitLab 12.0, and while specific details may have changed since then, it should still serve as a good introduction.
-
-[Deep Dive]: https://gitlab.com/gitlab-org/create-stage/issues/1
-[Elasticsearch integration]: ../integration/elasticsearch.md
-[recording on YouTube]: https://www.youtube.com/watch?v=vrvl-tN2EaA
-[Google Slides]: https://docs.google.com/presentation/d/1H-pCzI_LNrgrL5pJAIQgvLX8Ji0-jIKOg1QeJQzChug/edit
-[PDF]: https://gitlab.com/gitlab-org/create-stage/uploads/c5aa32b6b07476fa8b597004899ec538/Elasticsearch_Deep_Dive.pdf
+In June 2019, Mario de la Ossa hosted a [Deep Dive](https://gitlab.com/gitlab-org/create-stage/issues/1) on GitLab's [Elasticsearch integration](../integration/elasticsearch.md) to share his domain specific knowledge with anyone who may work in this part of the code base in the future. You can find the [recording on YouTube](https://www.youtube.com/watch?v=vrvl-tN2EaA), and the slides on [Google Slides](https://docs.google.com/presentation/d/1H-pCzI_LNrgrL5pJAIQgvLX8Ji0-jIKOg1QeJQzChug/edit) and in [PDF](https://gitlab.com/gitlab-org/create-stage/uploads/c5aa32b6b07476fa8b597004899ec538/Elasticsearch_Deep_Dive.pdf). Everything covered in this deep dive was accurate as of GitLab 12.0, and while specific details may have changed since then, it should still serve as a good introduction.
## Supported Versions
@@ -60,12 +54,15 @@ Please see the `sha_tokenizer` explanation later below for an example.
#### `code_analyzer`
-Used when indexing a blob's filename and content. Uses the `whitespace` tokenizer and the filters: `code`, `edgeNGram_filter`, `lowercase`, and `asciifolding`
+Used when indexing a blob's filename and content. Uses the `whitespace` tokenizer and the filters: [`code`](#code), [`edgeNGram_filter`](#edgengram_filter), `lowercase`, and `asciifolding`
The `whitespace` tokenizer was selected in order to have more control over how tokens are split. For example the string `Foo::bar(4)` needs to generate tokens like `Foo` and `bar(4)` in order to be properly searched.
Please see the `code` filter for an explanation on how tokens are split.
+NOTE: **Known Issues**:
+Currently the [Elasticsearch code_analyzer doesn't account for all code cases](../integration/elasticsearch.md#known-issues).
+
#### `code_search_analyzer`
Not directly used for indexing, but rather used to transform a search input. Uses the `whitespace` tokenizer and the `lowercase` and `asciifolding` filters.
diff --git a/doc/development/integrations/secure.md b/doc/development/integrations/secure.md
index 69128cfb625..b38e45778fb 100644
--- a/doc/development/integrations/secure.md
+++ b/doc/development/integrations/secure.md
@@ -233,6 +233,12 @@ describes the Secure report format version.
The `vulnerabilities` field of the report is an array of vulnerability objects.
+#### ID
+
+The `id` field is the unique identifier of the vulnerability.
+It is used to reference a fixed vulnerability from [remediation objects](#remediations).
+We recommend that you generate a UUID and use it as the `id` field's value.
+
#### Category
The value of the `category` field matches the report type:
@@ -467,6 +473,15 @@ The `remediations` field of the report is an array of remediation objects.
Each remediation describes a patch that can be applied to automatically fix
a set of vulnerabilities.
-Currently, remediations rely on a deprecated field named `cve` to reference vulnerabilities,
-so it is recommended not to use them until a new format has been defined.
-See [issue #36777](https://gitlab.com/gitlab-org/gitlab/issues/36777).
+#### Summary
+
+The `summary` field is an overview of how the vulnerabilities can be fixed.
+
+#### Fixed vulnerabilities
+
+The `fixes` field is an array of objects that reference the vulnerabilities fixed by the
+remediation. `fixes[].id` contains a fixed vulnerability's unique identifier.
+
+#### Diff
+
+The `diff` field is a base64-encoded remediation code diff, compatible with [`git apply`](https://git-scm.com/docs/git-format-patch#_discussion).
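+
+For illustration, the following Ruby sketch assembles a remediation object from the fields described above. The vulnerability, patch content, and surrounding report structure are hypothetical; only the `summary`, `fixes`, and `diff` field names come from this document.
+
+```ruby
+require 'securerandom'
+require 'base64'
+require 'json'
+
+# Hypothetical vulnerability id: a UUID, as recommended in the ID section above.
+vulnerability_id = SecureRandom.uuid
+
+# Illustrative patch content; a real scanner emits a diff that `git apply` accepts.
+patch = <<~DIFF
+  --- a/Gemfile.lock
+  +++ b/Gemfile.lock
+  @@ -1 +1 @@
+  -    vulnerable-gem (1.0.0)
+  +    vulnerable-gem (1.0.1)
+DIFF
+
+remediation = {
+  summary: 'Upgrade vulnerable-gem to 1.0.1',
+  fixes: [{ id: vulnerability_id }],  # references the fixed vulnerability
+  diff: Base64.strict_encode64(patch) # base64-encoded, `git apply`-compatible
+}
+
+puts JSON.pretty_generate(remediation)
+```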
diff --git a/doc/development/reusing_abstractions.md b/doc/development/reusing_abstractions.md
index fce144f8dc2..8711bac69e0 100644
--- a/doc/development/reusing_abstractions.md
+++ b/doc/development/reusing_abstractions.md
@@ -127,6 +127,8 @@ Everything in `lib/api`.
Everything that resides in `app/services`.
+In Service classes, the use of `execute` and `#execute` is preferred over `call` and `#call`.
+
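+The sketch below is hypothetical (the class, permission check, and attribute are made up); it only illustrates the naming convention, with `execute` as the public entry point and a `ServiceResponse` (described below) as the return value.
+
+```ruby
+module Projects
+  # Hypothetical service, not an actual GitLab class.
+  class ArchiveService
+    def initialize(project:, current_user:)
+      @project = project
+      @current_user = current_user
+    end
+
+    # Preferred: `execute`, not `call`.
+    def execute
+      return ServiceResponse.error(message: 'Access denied') unless allowed?
+
+      @project.update!(archived: true)
+      ServiceResponse.success(payload: { project: @project })
+    end
+
+    private
+
+    def allowed?
+      @current_user.can?(:admin_project, @project)
+    end
+  end
+end
+```
+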
#### ServiceResponse
Service classes usually have an `execute` method, which can return a
diff --git a/doc/development/secure_coding_guidelines.md b/doc/development/secure_coding_guidelines.md
new file mode 100644
index 00000000000..0367db8939a
--- /dev/null
+++ b/doc/development/secure_coding_guidelines.md
@@ -0,0 +1,306 @@
+# Security Guidelines
+
+This document contains descriptions and guidelines for addressing security
+vulnerabilities commonly identified in the GitLab codebase. They are intended
+to help developers identify potential security vulnerabilities early, with the
+goal of reducing the number of vulnerabilities released over time.
+
+**Contributing**
+
+If you would like to contribute to one of the existing documents, or add
+guidelines for a new vulnerability type, please open an MR! Please try to
+include links to examples of the vulnerability found, and link to any resources
+used in defined mitigations. If you have questions or when ready for a review,
+please ping `gitlab-com/gl-security/appsec`.
+
+## Permissions
+
+### Description
+
+Application permissions are used to determine who can access what and what actions they can perform.
+For more information about the permission model at GitLab, please see [the GitLab permissions guide](permissions.md) or the [EE docs on permissions](../../ee/user/permissions.md).
+
+### Impact
+
+Improper permission handling can have significant impacts on the security of an application.
+Some situations may reveal [sensitive data](https://gitlab.com/gitlab-com/gl-infra/production/issues/477) or allow a malicious actor to perform [harmful actions](https://gitlab.com/gitlab-org/gitlab/issues/8180).
+The overall impact depends heavily on what resources can be accessed or modified improperly.
+
+A common vulnerability when permission checks are missing is called [IDOR](https://www.owasp.org/index.php/Testing_for_Insecure_Direct_Object_References_(OTG-AUTHZ-004)), short for Insecure Direct Object Reference.
+
+### When to Consider
+
+Each time you implement a new feature/endpoint, whether it is at UI, API or GraphQL level.
+
+### Mitigations
+
+**Start by writing tests** around permissions: unit and feature specs should both include tests based around permissions
+
+- Fine-grained, nitty-gritty specs for permissions are good: it is ok to be verbose here
+ - Make assertions based on the actors and objects involved: can a user or group or XYZ perform this action on this object?
+ - Consider defining them upfront with stakeholders, particularly for the edge cases
+- Do not forget **abuse cases**: write specs that **make sure certain things can't happen**
+  - A lot of specs only verify that things do happen, and coverage percentage doesn't take permissions into account, as the same piece of code is exercised.
+ - Make assertions that certain actors cannot perform actions
+- Naming convention to ease auditability: to be defined, e.g. a subfolder containing those specific permission tests or a `#permissions` block
+
+Be careful to **also test [visibility levels](https://gitlab.com/gitlab-org/gitlab-foss/-/blob/master/doc/development/permissions.md#feature-specific-permissions)** and not only project access rights.
+
+Some examples of well-implemented access controls and tests:
+
+1. [example1](https://dev.gitlab.org/gitlab/gitlab-ee/merge_requests/710/diffs?diff_id=13750#af40ef0eaae3c1e018809e1d88086e32bccaca40_43_43)
+1. [example2](https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/2511/diffs#ed3aaab1510f43b032ce345909a887e5b167e196_142_155)
+1. [example3](https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/3170/diffs?diff_id=17494)
+
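+In addition to those merge requests, here is a hypothetical sketch of what a permission spec following the guidance above can look like. It assumes FactoryBot factories and the `be_allowed` / `be_disallowed` policy matchers used in the GitLab test suite; the abilities shown are illustrative.
+
+```ruby
+RSpec.describe ProjectPolicy do
+  let(:project) { create(:project, :private) }
+
+  context 'when the user is not a member' do
+    let(:user) { create(:user) }
+
+    it 'cannot read issues (abuse case)' do
+      expect(described_class.new(user, project)).to be_disallowed(:read_issue)
+    end
+  end
+
+  context 'when the user is a guest member' do
+    let(:user) { create(:user).tap { |guest| project.add_guest(guest) } }
+
+    it 'can read issues but cannot administer the project' do
+      policy = described_class.new(user, project)
+
+      expect(policy).to be_allowed(:read_issue)
+      expect(policy).to be_disallowed(:admin_project)
+    end
+  end
+end
+```
+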
+**NB:** any input from the development team is welcome, e.g. about RuboCop rules.
+
+## Regular Expressions guidelines
+
+### Anchors / Multi line
+
+Unlike in other programming languages (e.g. Perl or Python), regular expressions in Ruby match in multi-line mode by default. Consider the following example in Python:
+
+```python
+import re
+text = "foo\nbar"
+matches = re.findall("^bar$",text)
+print(matches)
+```
+
+The Python example will output an empty array (`[]`), as the matcher considers the whole string `foo\nbar`, including the newline (`\n`). In contrast, Ruby's Regular Expression engine acts differently:
+
+```ruby
+text = "foo\nbar"
+p text.match /^bar$/
+```
+
+The output of this example is `#<MatchData "bar">`, as Ruby treats the input `text` line by line. In order to match the whole __string__, the Regex anchors `\A` and `\z` should be used (you can verify this on [Rubular](https://rubular.com/)).
+
+#### Impact
+
+This Ruby Regex peculiarity can have a security impact, as regular expressions are often used for validations or to impose restrictions on user input.
+
+#### Examples
+
+GitLab specific examples can be found [here](https://gitlab.com/gitlab-org/gitlab/issues/36029#note_251262187) and [there](https://gitlab.com/gitlab-org/gitlab/issues/33569).
+
+Another example would be this fictional Ruby On Rails controller:
+
+```ruby
+class PingController < ApplicationController
+ def ping
+ if params[:ip] =~ /^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$/
+ render :text => `ping -c 4 #{params[:ip]}`
+ else
+ render :text => "Invalid IP"
+ end
+ end
+end
+```
+
+Here `params[:ip]` should not contain anything other than numbers and dots. However, this restriction can be easily bypassed because the Regex anchors `^` and `$` are being used. Ultimately this leads to a shell command injection in `ping -c 4 #{params[:ip]}` by using newlines in `params[:ip]`.
+
+#### Mitigation
+
+In most cases the anchors `\A` for beginning of text and `\z` for end of text should be used instead of `^` and `$`.
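+
+Applied to the fictional controller above, a minimal sketch of the fix looks like this (plain Ruby, for illustration only):
+
+```ruby
+# `\A` and `\z` anchor the match to the whole string, so an input such as
+# "127.0.0.1\nwhoami" no longer passes validation (it would with `^` and `$`).
+def valid_ip?(input)
+  input.match?(/\A\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\z/)
+end
+
+valid_ip?("127.0.0.1")          # => true
+valid_ip?("127.0.0.1\nwhoami")  # => false
+```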
+
+### Further Links
+
+- [Rubular](https://rubular.com/) is a nice online tool to fiddle with Ruby Regexps.
+
+## Server Side Request Forgery (SSRF)
+
+### Description
+
+A [Server-side Request Forgery (SSRF)][1] is an attack in which an attacker
+is able to coerce an application into making an outbound request to an unintended
+resource. This resource is usually internal. In GitLab, the connection most
+commonly uses HTTP, but an SSRF can be performed with any protocol, such as
+Redis or SSH.
+
+With an SSRF attack, the UI may or may not show the response. The latter is
+called a Blind SSRF. While the impact is reduced, it can still be useful for
+attackers, especially for mapping internal network services as part of recon.
+
+[1]: https://www.hackerone.com/blog-How-To-Server-Side-Request-Forgery-SSRF
+
+### Impact
+
+The impact of an SSRF can vary, depending on what the application server
+can communicate with, how much of the payload the attacker can control, and
+whether the response is returned to the attacker. Examples of impact that
+have been reported to GitLab include:
+
+- Network mapping of internal services
+ - This can help an attacker gather information about internal services
+ that could be used in further attacks. [More details](https://gitlab.com/gitlab-org/gitlab-foss/issues/51327).
+- Reading internal services, including cloud service metadata.
+ - The latter can be a serious problem, because an attacker can obtain keys that allow control of the victim's cloud infrastructure. (This is also a good reason
+  to give only the necessary privileges to the token.) [More details](https://gitlab.com/gitlab-org/gitlab-foss/issues/51490).
+- When combined with a CRLF vulnerability, remote code execution. [More details](https://gitlab.com/gitlab-org/gitlab-foss/issues/41293)
+
+### When to Consider
+
+- When the application makes any outbound connection
+
+### Mitigations
+
+In order to mitigate SSRF vulnerabilities, it is necessary to validate the destination of the outgoing request, especially if it includes user-supplied information.
+
+The preferred SSRF mitigations within GitLab are:
+
+1. Only connect to known, trusted domains/IP addresses.
+1. Use the [GitLab::HTTP](#gitlab-http-library) library.
+1. Implement [feature-specific mitigations](#feature-specific-mitigations).
+
+#### GitLab HTTP Library
+
+The [GitLab::HTTP][2] wrapper library has grown to include mitigations for all of the GitLab-known SSRF vectors. It is also configured to respect the
+`Outbound requests` options that allow instance administrators to block all internal connections, or limit the networks to which connections can be made.
+
+In some cases, it has been possible to configure GitLab::HTTP as the HTTP
+connection library for 3rd-party gems. This is preferable over re-implementing
+the mitigations for a new feature.
+
+- [More details](https://dev.gitlab.org/gitlab/gitlabhq/merge_requests/2530/diffs)
+
+[2]: https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/http.rb
+
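+As a usage sketch (hypothetical URL variable; check `lib/gitlab/http.rb` for the current interface), calling the wrapper instead of `Net::HTTP` or HTTParty directly means the request goes through these protections:
+
+```ruby
+# Outbound request made through the wrapper, so the SSRF mitigations and the
+# instance's `Outbound requests` settings apply.
+response = Gitlab::HTTP.get(webhook_url)
+response.code # HTTParty-style response object
+```
+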
+#### Feature-specific Mitigations
+
+For situations in which a whitelist or GitLab::HTTP cannot be used, it will be necessary to implement mitigations directly in the feature. It is best to validate the destination IP addresses themselves, not just domain names, as DNS can be controlled by the attacker. Below is a list of mitigations that should be implemented, followed by a short illustrative sketch.
+
+**Important Note:** There are many tricks to bypass common SSRF validations. If feature-specific mitigations are necessary, they should be reviewed by the AppSec team, or a developer who has worked on SSRF mitigations previously.
+
+- Block connections to all localhost addresses
+ - `127.0.0.1/8` (IPv4 - note the subnet mask)
+ - `::1` (IPv6)
+- Block connections to networks with private addressing (RFC 1918)
+ - `10.0.0.0/8`
+ - `172.16.0.0/12`
+  - `192.168.0.0/16`
+- Block connections to link-local addresses (RFC 3927)
+ - `169.254.0.0/16`
+ - In particular, for GCP: `metadata.google.internal` -> `169.254.169.254`
+- For HTTP connections: Disable redirects or validate the redirect destination
+- To mitigate DNS rebinding attacks, validate and use the first IP address received
+
+See [url_blocker_spec.rb][3] for examples of SSRF payloads.
+
+[3]: https://gitlab.com/gitlab-org/gitlab/-/blob/master/spec/lib/gitlab/url_blocker_spec.rb
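+
+The sketch below illustrates the address checks from the list above in plain Ruby. It assumes the destination has already been resolved to an IP address; real code must also handle redirects and DNS rebinding as noted above.
+
+```ruby
+require 'ipaddr'
+
+BLOCKED_RANGES = [
+  IPAddr.new('127.0.0.0/8'),    # IPv4 localhost
+  IPAddr.new('::1'),            # IPv6 localhost
+  IPAddr.new('10.0.0.0/8'),     # RFC 1918
+  IPAddr.new('172.16.0.0/12'),  # RFC 1918
+  IPAddr.new('192.168.0.0/16'), # RFC 1918
+  IPAddr.new('169.254.0.0/16')  # RFC 3927 link-local (includes GCP metadata)
+].freeze
+
+def blocked_destination?(resolved_ip)
+  ip = IPAddr.new(resolved_ip)
+  BLOCKED_RANGES.any? { |range| range.family == ip.family && range.include?(ip) }
+end
+
+blocked_destination?('169.254.169.254') # => true
+blocked_destination?('203.0.113.10')    # => false
+```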
+
+## XSS guidelines
+
+### Description
+
+Cross-site scripting (XSS) is an issue where malicious JavaScript code gets injected into a trusted web application and executed in a client's browser. The input is intended to be data, but instead gets treated as code by the browser.
+
+XSS issues are commonly classified in three categories, by their delivery method:
+
+- [Persistent XSS](https://owasp.org/www-community/Types_of_Cross-Site_Scripting#stored-xss-aka-persistent-or-type-i)
+- [Reflected XSS](https://owasp.org/www-community/Types_of_Cross-Site_Scripting#reflected-xss-aka-non-persistent-or-type-ii)
+- [DOM XSS](https://owasp.org/www-community/Types_of_Cross-Site_Scripting#dom-based-xss-aka-type-0)
+
+### Impact
+
+The injected client-side code is executed in the victim's browser in the context of their current session. This means the attacker could perform the same actions the victim would normally be able to perform through the browser. The attacker would also have the ability to:
+
+- [log victim keystrokes](https://youtu.be/2VFavqfDS6w?t=1367)
+- launch a network scan from the victim's browser
+- potentially [obtain the victim's session tokens](https://youtu.be/2VFavqfDS6w?t=739)
+- perform actions that lead to data loss/theft or account takeover
+
+Much of the impact is contingent upon the function of the application and the capabilities of the victim's session. For further impact possibilities, please check out [the beef project](https://beefproject.com/).
+
+### When to Consider
+
+When user-submitted data is included in responses to end users, which is just about anywhere.
+
+### Mitigation
+
+In most situations, a two-step solution can be utilized: input validation and output encoding in the appropriate context.
+
+#### Input validation
+
+- [Input Validation](https://youtu.be/2VFavqfDS6w?t=7489)
+
+##### Setting expectations
+
+For any and all input fields, be sure to define expectations on the type/format of the input, its contents, [size limits](https://youtu.be/2VFavqfDS6w?t=7582), and the context in which it will be output. It's important to work with both security and product teams to determine what is considered acceptable input.
+
+##### Validate input
+
+- Treat all user input as untrusted.
+- Based on the expectations you [defined above](#setting-expectations):
+ - Validate the [input size limits](https://youtu.be/2VFavqfDS6w?t=7582).
+  - Validate the input using a [whitelist approach](https://youtu.be/2VFavqfDS6w?t=7816) to only allow the characters you expect to receive for the field (see the sketch below).
+ - Input which fails validation should be **rejected**, and not sanitized.
+
+Note that blacklists should be avoided, as it is near impossible to block all [variations of XSS](https://www.owasp.org/index.php/XSS_Filter_Evasion_Cheat_Sheet).
+
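+The sketch below is a hypothetical validator that follows the rules above: a whitelist of expected characters with a size limit, anchored with `\A` and `\z` (see the Regular Expressions guidelines), and rejection rather than sanitization when the input doesn't match.
+
+```ruby
+USERNAME_FORMAT = /\A[a-z0-9_\-]{1,255}\z/.freeze
+
+def validate_username!(input)
+  # Reject non-matching input instead of trying to sanitize it.
+  raise ArgumentError, 'invalid username' unless input.match?(USERNAME_FORMAT)
+
+  input
+end
+
+validate_username!('team_42')                   # => "team_42"
+validate_username!('<script>alert(1)</script>') # raises ArgumentError
+```
+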
+#### Output encoding
+
+Once you've [determined when and where](#setting-expectations) the user-submitted data will be output, it's important to encode it based on the appropriate context, as shown in the sketch after this list. For example:
+
+- Content placed inside HTML elements needs to be [HTML entity encoded](https://cheatsheetseries.owasp.org/cheatsheets/Cross_Site_Scripting_Prevention_Cheat_Sheet.html#rule-1---html-escape-before-inserting-untrusted-data-into-html-element-content).
+- Content placed into a JSON response needs to be [JSON encoded](https://cheatsheetseries.owasp.org/cheatsheets/Cross_Site_Scripting_Prevention_Cheat_Sheet.html#rule-31---html-escape-json-values-in-an-html-context-and-read-the-data-with-jsonparse).
+- Content placed inside [HTML URL GET parameters](https://youtu.be/2VFavqfDS6w?t=3494) needs to be [URL-encoded](https://cheatsheetseries.owasp.org/cheatsheets/Cross_Site_Scripting_Prevention_Cheat_Sheet.html#rule-5---url-escape-before-inserting-untrusted-data-into-html-url-parameter-values).
+- [Additional contexts may require context-specific encoding](https://youtu.be/2VFavqfDS6w?t=2341).
+
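+The following sketch shows those encodings using Ruby's standard library; in Rails views, helpers such as `html_escape` and `url_encode` play the same role.
+
+```ruby
+require 'erb'
+require 'json'
+require 'cgi'
+
+untrusted = '<img src=x onerror=alert(1)>'
+
+ERB::Util.html_escape(untrusted)   # for HTML element content
+{ comment: untrusted }.to_json     # for a JSON response
+CGI.escape(untrusted)              # for URL query parameter values
+```
+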
+### Additional info
+
+#### Mitigating XSS in Rails
+
+- [XSS Defense in Rails](https://youtu.be/2VFavqfDS6w?t=2442)
+- [XSS Defense with HAML](https://youtu.be/2VFavqfDS6w?t=2796)
+- [Validating Untrusted URLs in Ruby](https://youtu.be/2VFavqfDS6w?t=3936)
+- [RoR Model Validators](https://youtu.be/2VFavqfDS6w?t=7636)
+
+#### GitLab specific libraries for mitigating XSS
+
+##### Vue
+
+- [isSafeURL](https://gitlab.com/gitlab-org/gitlab/-/blob/v12.7.5-ee/app/assets/javascripts/lib/utils/url_utility.js#L190-207)
+
+#### Content Security Policy
+
+- [Content Security Policy](https://www.youtube.com/watch?v=2VFavqfDS6w&t=12991s)
+- [Use nonce-based Content Security Policy for inline JavaScript](https://gitlab.com/gitlab-org/gitlab-foss/issues/65330)
+
+#### Free form input fields
+
+##### Sanitization
+
+- [HTML Sanitization](https://youtu.be/2VFavqfDS6w?t=5075)
+- [DOMPurify](https://youtu.be/2VFavqfDS6w?t=5381)
+
+##### `iframe` sandboxes
+
+- [iframe sandboxing](https://youtu.be/2VFavqfDS6w?t=7043)
+
+### Select examples of past XSS issues affecting GitLab
+
+- [Stored XSS in user status](https://gitlab.com/gitlab-org/gitlab-foss/issues/55320)
+
+### Developer Training
+
+- [Introduction to XSS](https://www.youtube.com/watch?v=PXR8PTojHmc&t=7785s)
+- [Reflected XSS](https://youtu.be/2VFavqfDS6w?t=603s)
+- [Persistent XSS](https://youtu.be/2VFavqfDS6w?t=643)
+- [DOM XSS](https://youtu.be/2VFavqfDS6w?t=5871)
+- [XSS in depth](https://www.youtube.com/watch?v=2VFavqfDS6w&t=111s)
+- [XSS Defense](https://youtu.be/2VFavqfDS6w?t=1685)
+- [XSS Defense in Rails](https://youtu.be/2VFavqfDS6w?t=2442)
+- [XSS Defense with HAML](https://youtu.be/2VFavqfDS6w?t=2796)
+- [JavaScript URLs](https://youtu.be/2VFavqfDS6w?t=3274)
+- [URL encoding context](https://youtu.be/2VFavqfDS6w?t=3494)
+- [Validating Untrusted URLs in Ruby](https://youtu.be/2VFavqfDS6w?t=3936)
+- [HTML Sanitization](https://youtu.be/2VFavqfDS6w?t=5075)
+- [DOMPurify](https://youtu.be/2VFavqfDS6w?t=5381)
+- [Safe Client-side JSON Handling](https://youtu.be/2VFavqfDS6w?t=6334)
+- [iframe sandboxing](https://youtu.be/2VFavqfDS6w?t=7043)
+- [Input Validation](https://youtu.be/2VFavqfDS6w?t=7489)
+- [Validate size limits](https://youtu.be/2VFavqfDS6w?t=7582)
+- [RoR model validators](https://youtu.be/2VFavqfDS6w?t=7636)
+- [Whitelist input validation](https://youtu.be/2VFavqfDS6w?t=7816)
+- [Content Security Policy](https://www.youtube.com/watch?v=2VFavqfDS6w&t=12991s)
diff --git a/doc/development/testing_guide/end_to_end/best_practices.md b/doc/development/testing_guide/end_to_end/best_practices.md
index fbb2a17bef1..57cfcf34726 100644
--- a/doc/development/testing_guide/end_to_end/best_practices.md
+++ b/doc/development/testing_guide/end_to_end/best_practices.md
@@ -1,43 +1,59 @@
-# Best practices when writing end-to-end tests
+# End-to-end testing Best Practices
-## Avoid using a GUI when it's not required
+NOTE: **Note:**
+This is a tailored extension of the Best Practices [found in the testing guide](../best_practices.md).
-The majority of the end-to-end tests require some state to be built in the application for the tests to happen.
+## Prefer API over UI
-A good example is a user being logged in as a pre-condition for testing the feature.
+The end-to-end testing framework has the ability to fabricate its resources on a case-by-case basis.
+Resources should be fabricated via the API wherever possible.
-But if the login feature is already covered with end-to-end tests through the GUI, there is no reason to perform such an expensive task to test the functionality of creating a project, or importing a repo, even if these features depend on a user being logged in. Let's see an example to make things clear.
+We can save both time and money by fabricating resources that our test will need via the API.
-Let's say that, on average, the process to perform a successful login through the GUI takes 2 seconds.
+[Learn more](resources.md) about resources.
-Now, realize that almost all tests need the user to be logged in, and that we need every test to run in isolation, meaning that tests cannot interfere with each other. This would mean that for every test the user needs to log in, and "waste 2 seconds".
+## Avoid superfluous expectations
-Now, multiply the number of tests per 2 seconds, and as your test suite grows, the time to run it grows with it, and this is not sustainable.
+To keep tests lean, it is important that we only test what we need to test.
-An alternative to perform a login in a cheaper way would be having an endpoint (available only for testing) where we could pass the user's credentials as encrypted values as query strings, and then we would be redirected to the logged in home page if the credentials are valid. Let's say that, on average, this process takes only 200 milliseconds.
+Ensure that you do not add any `expect()` statements that are unrelated to what needs to be tested.
-You see the point right?
+For example:
-Performing a login through the GUI for every test would cost a lot in terms of tests' execution.
-
-And there is another reason.
-
-Let's say that you don't follow the above suggestion, and depend on the GUI for the creation of every application state in order to test a specific feature. In this case we could be talking about the **Issues** feature, that depends on a project to exist, and the user to be logged in.
-
-What would happen if there was a bug in the project creation page, where the 'Create' button is disabled, not allowing for the creation of a project through the GUI, but the API logic is still working?
-
-In this case, instead of having only the project creation test failing, we would have many tests that depend on a project to be failing too.
+```ruby
+#=> Good
+Flow::Login.sign_in
+Page::Main::Menu.perform do |menu|
+ expect(menu).to be_signed_in
+end
-But, if we were following the best practices, only one test would be failing, and tests for other features that depend on a project to exist would continue to pass, since they could be creating the project behind the scenes interacting directly with the public APIs, ensuring a more reliable metric of test failure rate.
+#=> Bad
+Flow::Login.sign_in(as: user)
+Page::Main::Menu.perform do |menu|
+ expect(menu).to be_signed_in
+ expect(page).to have_content(user.name) #=> we already validated being signed in. redundant.
+ expect(menu).to have_element(:nav_bar) #=> likely unnecessary. already validated in lower-level. test doesn't call for validating this.
+end
-Finally, interacting with the application only by its GUI generates a higher rate of test flakiness, and we want to avoid that at max.
+#=> Good
+issue = Resource::Issue.fabricate_via_api! do |issue|
+ issue.name = 'issue-name'
+end
-**The takeaways here are:**
+Project::Issues::Index.perform do |index|
+ expect(index).to have_issue(issue)
+end
-- Building state through the GUI is time consuming and it's not sustainable as the test suite grows.
-- When depending only on the GUI to create the application's state and tests fail due to front-end issues, we can't rely on the test failures rate, and we generate a higher rate of test flakiness.
+#=> Bad
+issue = Resource::Issue.fabricate_via_api! do |issue|
+ issue.name = 'issue-name'
+end
-Now that we are aware of all of it, [let's go create some tests](quick_start_guide.md).
+Project::Issues::Index.perform do |index|
+ expect(index).to have_issue(issue)
+ expect(page).to have_content(issue.name) #=> page content check is redundant as the issue was already validated in the line above.
+end
+```
## Prefer to split tests across multiple files
@@ -54,17 +70,18 @@ In summary:
- **Do**: Split tests across separate files, unless the tests share expensive setup.
- **Don't**: Put new tests in an existing file without considering the impact on parallelization.
-## Limit the use of `before(:all)` and `after` hooks
+## Limit the use of the UI in `before(:context)` and `after` hooks
-Limit the use of `before(:all)` hook to perform setup tasks with only API calls, non UI operations
-or basic UI operations such as login.
+Limit the use of `before(:context)` hooks to perform setup tasks with only API calls,
+non-UI operations, or basic UI operations such as login.
-We use [`capybara-screenshot`](https://github.com/mattheworiordan/capybara-screenshot) library to automatically save screenshots on failures.
-This library [saves the screenshots in the RSpec's `after` hook](https://github.com/mattheworiordan/capybara-screenshot/blob/master/lib/capybara-screenshot/rspec.rb#L97).
-[If there is a failure in `before(:all)`, the `after` hook is not called](https://github.com/rspec/rspec-core/pull/2652/files#diff-5e04af96d5156e787f28d519a8c99615R148) and so the screenshots are not saved.
+We use [`capybara-screenshot`](https://github.com/mattheworiordan/capybara-screenshot) library to automatically save a screenshot on
+failure.
-Given this fact, we should limit the use of `before(:all)` to only those operations where a screenshot is not
-necessary in case of failure and QA logs would be enough for debugging.
+`capybara-screenshot` [saves the screenshot in the RSpec's `after` hook](https://github.com/mattheworiordan/capybara-screenshot/blob/master/lib/capybara-screenshot/rspec.rb#L97).
+[If there is a failure in `before(:context)`, the `after` hook is not called](https://github.com/rspec/rspec-core/pull/2652/files#diff-5e04af96d5156e787f28d519a8c99615R148) and so the screenshot is not saved.
+
+Given this fact, we should limit the use of `before(:context)` to only those operations where a screenshot is not needed.
Similarly, the `after` hook should only be used for non-UI operations. Any UI operations in `after` hook in a test file
would execute before the `after` hook that takes the screenshot. This would result in moving the UI status away from the
@@ -72,16 +89,11 @@ point of failure and so the screenshot would not be captured at the right moment
## Ensure tests do not leave the browser logged in
-All QA tests expect to be able to log in at the start of the test.
-
-That's not possible if a test leaves the browser logged in when it finishes. Normally this isn't a
-problem because [Capybara resets the session after each test](https://github.com/teamcapybara/capybara/blob/9ebc5033282d40c73b0286e60217515fd1bb0b5d/lib/capybara/rspec.rb#L18).
-But Capybara does that in an `after` block, so when a test logs in within an `after(:context)` block,
-the browser returns to a logged in state *after* Capybara had logged it out. And so the next test will fail.
+All tests expect to be able to log in at the start of the test.
For an example see: <https://gitlab.com/gitlab-org/gitlab/issues/34736>
-Ideally, any actions performed in an `after(:context)` (or [`before(:context)`](#limit-the-use-of-beforeall-and-after-hooks)) block would be performed via the API. But if it's necessary to do so via the UI (e.g., if API functionality doesn't exist), make sure to log out at the end of the block.
+Ideally, any actions performed in an `after(:context)` (or [`before(:context)`](#limit-the-use-of-the-ui-in-beforecontext-and-after-hooks)) block would be performed via the API. But if it's necessary to do so via the UI (e.g., if API functionality doesn't exist), make sure to log out at the end of the block.
```ruby
after(:all) do
@@ -100,3 +112,30 @@ We don't run tests that require Administrator access against our Production envi
When you add a new test that requires Administrator access, apply the RSpec metadata `:requires_admin` so that the test will not be included in the test suites executed against Production and other environments on which we don't want to run those tests.
Note: When running tests locally or configuring a pipeline, the environment variable `QA_CAN_TEST_ADMIN_FEATURES` can be set to `false` to skip tests that have the `:requires_admin` tag.
+
+## Prefer `Commit` resource over `ProjectPush`
+
+In line with [using the API](#prefer-api-over-ui), use a `Commit` resource whenever possible.
+
+`ProjectPush` uses raw shell commands via the Git Command Line Interface (CLI) whereas the `Commit` resource makes an HTTP request.
+
+```ruby
+# Using a commit resource
+Resource::Commit.fabricate_via_api! do |commit|
+ commit.commit_message = 'Initial commit'
+ commit.add_files([
+ {file_path: 'README.md', content: 'Hello, GitLab'}
+ ])
+end
+
+# Using a ProjectPush
+Resource::Repository::ProjectPush.fabricate! do |push|
+ push.commit_message = 'Initial commit'
+ push.file_name = 'README.md'
+ push.file_content = 'Hello, GitLab'
+end
+```
+
+NOTE: **Note:**
+A few exceptions for using a `ProjectPush` would be when your test calls for testing SSH integration or
+using the Git CLI.
diff --git a/doc/development/testing_guide/end_to_end/feature_flags.md b/doc/development/testing_guide/end_to_end/feature_flags.md
index bf1e70be9cb..3bd07f17207 100644
--- a/doc/development/testing_guide/end_to_end/feature_flags.md
+++ b/doc/development/testing_guide/end_to_end/feature_flags.md
@@ -1,11 +1,13 @@
# Testing with feature flags
-To run a specific test with a feature flag enabled you can use the `QA::Runtime::Feature` class to enabled and disable feature flags ([via the API](../../../api/features.md)).
+To run a specific test with a feature flag enabled you can use the `QA::Runtime::Feature` class to enable and disable feature flags ([via the API](../../../api/features.md)).
Note that administrator authorization is required to change feature flags. `QA::Runtime::Feature` will automatically authenticate as an administrator as long as you provide an appropriate access token via `GITLAB_QA_ADMIN_ACCESS_TOKEN` (recommended), or provide `GITLAB_ADMIN_USERNAME` and `GITLAB_ADMIN_PASSWORD`.
+Please be sure to include the tag `:requires_admin` so that the test can be skipped in environments where admin access is not available.
+
```ruby
-context "with feature flag enabled" do
+context "with feature flag enabled", :requires_admin do
before do
Runtime::Feature.enable('feature_flag_name')
end
diff --git a/doc/development/testing_guide/end_to_end/index.md b/doc/development/testing_guide/end_to_end/index.md
index e2622cec6e2..443b7b06a24 100644
--- a/doc/development/testing_guide/end_to_end/index.md
+++ b/doc/development/testing_guide/end_to_end/index.md
@@ -9,8 +9,8 @@ together.
## How do we test GitLab?
-We use [Omnibus GitLab][omnibus-gitlab] to build GitLab packages and then we
-test these packages using the [GitLab QA orchestrator][gitlab-qa] tool, which is
+We use [Omnibus GitLab](https://gitlab.com/gitlab-org/omnibus-gitlab) to build GitLab packages and then we
+test these packages using the [GitLab QA orchestrator](https://gitlab.com/gitlab-org/gitlab-qa) tool, which is
a black-box testing framework for the API and the UI.
### Testing nightly builds
@@ -38,7 +38,7 @@ available for forks).
Omnibus package built from your merge request's changes.**
Manual action that starts end-to-end tests is also available in merge requests
-in [Omnibus GitLab][omnibus-gitlab].
+in [Omnibus GitLab](https://gitlab.com/gitlab-org/omnibus-gitlab).
Below you can read more about how to use it and how does it work.
@@ -68,14 +68,15 @@ subgraph "gitlab-qa-mirror pipeline"
1. Developer triggers a manual action, that can be found in CE / EE merge
requests. This starts a chain of pipelines in multiple projects.
-1. The script being executed triggers a pipeline in [Omnibus GitLab Mirror][omnibus-gitlab-mirror]
+1. The script being executed triggers a pipeline in
+ [Omnibus GitLab Mirror](https://gitlab.com/gitlab-org/omnibus-gitlab-mirror)
and waits for the resulting status. We call this a _status attribution_.
-1. GitLab packages are being built in the [Omnibus GitLab][omnibus-gitlab]
+1. GitLab packages are being built in the [Omnibus GitLab](https://gitlab.com/gitlab-org/omnibus-gitlab)
pipeline. Packages are then pushed to its Container Registry.
1. When packages are ready, and available in the registry, a final step in the
- [Omnibus GitLab][omnibus-gitlab] pipeline, triggers a new
+ [Omnibus GitLab](https://gitlab.com/gitlab-org/omnibus-gitlab) pipeline, triggers a new
GitLab QA pipeline (those with access can view them at `https://gitlab.com/gitlab-org/gitlab-qa-mirror/pipelines`). It also waits for a resulting status.
1. GitLab QA pulls images from the registry, spins-up containers and runs tests
@@ -139,26 +140,23 @@ many of the 10 available jobs that you want to run).
On every pipeline during the `test` stage, the `review-qa-smoke` job is
automatically started: it runs the QA smoke suite against the
-[Review App][review-apps].
+[Review App](../review_apps.md).
You can also manually start the `review-qa-all`: it runs the full QA suite
-against the [Review App][review-apps].
+against the [Review App](../review_apps.md).
**This runs end-to-end tests against a Review App based on [the official GitLab
-Helm chart][helm-chart], itself deployed with custom
-[Cloud Native components][cng] built from your merge request's changes.**
+Helm chart](https://gitlab.com/gitlab-org/charts/gitlab/), itself deployed with custom
+[Cloud Native components](https://gitlab.com/gitlab-org/build/CNG) built from your merge request's changes.**
-See [Review Apps][review-apps] for more details about Review Apps.
-
-[helm-chart]: https://gitlab.com/gitlab-org/charts/gitlab/
-[cng]: https://gitlab.com/gitlab-org/build/CNG
+See [Review Apps](../review_apps.md) for more details about Review Apps.
## How do I run the tests?
If you are not [testing code in a merge request](#testing-code-in-merge-requests),
there are two main options for running the tests. If you simply want to run
the existing tests against a live GitLab instance or against a pre-built docker image
-you can use the [GitLab QA orchestrator][gitlab-qa-readme]. See also [examples
+you can use the [GitLab QA orchestrator](https://gitlab.com/gitlab-org/gitlab-qa/tree/master/README.md). See also [examples
of the test scenarios you can run via the orchestrator](https://gitlab.com/gitlab-org/gitlab-qa/blob/master/docs/what_tests_can_be_run.md#examples).
On the other hand, if you would like to run against a local development GitLab
@@ -173,12 +171,12 @@ Learn how to perform [tests that require special setup or consideration to run o
## How do I write tests?
In order to write new tests, you first need to learn more about GitLab QA
-architecture. See the [documentation about it][gitlab-qa-architecture].
+architecture. See the [documentation about it](https://gitlab.com/gitlab-org/gitlab-qa/blob/master/docs/architecture.md).
-Once you decided where to put [test environment orchestration scenarios] and
-[instance-level scenarios], take a look at the [GitLab QA README][instance-qa-readme],
-the [GitLab QA orchestrator README][gitlab-qa-readme], and [the already existing
-instance-level scenarios][instance-level scenarios].
+Once you decided where to put [test environment orchestration scenarios](https://gitlab.com/gitlab-org/gitlab-qa/tree/master/lib/gitlab/qa/scenario) and
+[instance-level scenarios](https://gitlab.com/gitlab-org/gitlab-foss/tree/master/qa/qa/specs/features), take a look at the [GitLab QA README](https://gitlab.com/gitlab-org/gitlab/tree/master/qa/README.md),
+the [GitLab QA orchestrator README](https://gitlab.com/gitlab-org/gitlab-qa/tree/master/README.md), and [the already existing
+instance-level scenarios](https://gitlab.com/gitlab-org/gitlab-foss/tree/master/qa/qa/specs/features).
Continued reading:
@@ -193,18 +191,5 @@ Continued reading:
You can ask question in the `#quality` channel on Slack (GitLab internal) or
you can find an issue you would like to work on in
-[the `gitlab` issue tracker][gitlab-issues], or
-[the `gitlab-qa` issue tracker][gitlab-qa-issues].
-
-[omnibus-gitlab]: https://gitlab.com/gitlab-org/omnibus-gitlab
-[omnibus-gitlab-mirror]: https://gitlab.com/gitlab-org/omnibus-gitlab-mirror
-[gitlab-qa]: https://gitlab.com/gitlab-org/gitlab-qa
-[gitlab-qa-readme]: https://gitlab.com/gitlab-org/gitlab-qa/tree/master/README.md
-[review-apps]: ../review_apps.md
-[gitlab-qa-architecture]: https://gitlab.com/gitlab-org/gitlab-qa/blob/master/docs/architecture.md
-[gitlab-qa-issues]: https://gitlab.com/gitlab-org/gitlab-qa/issues?label_name%5B%5D=new+scenario
-[gitlab-issues]: https://gitlab.com/gitlab-org/gitlab/issues?label_name[]=QA&label_name[]=test
-[test environment orchestration scenarios]: https://gitlab.com/gitlab-org/gitlab-qa/tree/master/lib/gitlab/qa/scenario
-[instance-level scenarios]: https://gitlab.com/gitlab-org/gitlab-foss/tree/master/qa/qa/specs/features
-[instance-qa-readme]: https://gitlab.com/gitlab-org/gitlab/tree/master/qa/README.md
-[instance-qa-examples]: https://gitlab.com/gitlab-org/gitlab-foss/tree/master/qa/qa
+[the `gitlab` issue tracker](https://gitlab.com/gitlab-org/gitlab/issues?label_name%5B%5D=QA&label_name%5B%5D=test), or
+[the `gitlab-qa` issue tracker](https://gitlab.com/gitlab-org/gitlab-qa/issues?label_name%5B%5D=new+scenario).
diff --git a/doc/development/testing_guide/end_to_end/quick_start_guide.md b/doc/development/testing_guide/end_to_end/quick_start_guide.md
index c721c934033..0ae3f375284 100644
--- a/doc/development/testing_guide/end_to_end/quick_start_guide.md
+++ b/doc/development/testing_guide/end_to_end/quick_start_guide.md
@@ -22,10 +22,10 @@ If you don't exactly understand what we mean by **not everything needs to happen
- [2.](#2-test-skeleton) Creating the skeleton of the test file (`*_spec.rb`)
- [3.](#3-test-cases-mvc) The [MVC](https://about.gitlab.com/handbook/values/#minimum-viable-change-mvc) of the test cases' logic
- [4.](#4-extracting-duplicated-code) Extracting duplicated code into methods
-- [5.](#5-tests-pre-conditions-using-resources-and-page-objects) Tests' pre-conditions (`before :context` and `before`) using resources and [Page Objects]
+- [5.](#5-tests-pre-conditions-using-resources-and-page-objects) Tests' pre-conditions (`before :context` and `before`) using resources and [Page Objects](page_objects.md)
- [6.](#6-optimization) Optimizing the test suite
- [7.](#7-resources) Using and implementing resources
-- [8.](#8-page-objects) Moving element definitions and methods to [Page Objects]
+- [8.](#8-page-objects) Moving element definitions and methods to [Page Objects](page_objects.md)
### 0. Are end-to-end tests needed?
@@ -126,7 +126,7 @@ end
> Notice that the test itself is simple. The most challenging part is the creation of the application state, which will be covered later.
>
-> The exemplified test case's MVC is not enough for the change to be merged, but it helps to build up the test logic. The reason is that we do not want to use locators directly in the tests, and tests **must** use [Page Objects] before they can be merged. This way we better separate the responsibilities, where the Page Objects encapsulate elements and methods that allow us to interact with pages, while the spec files describe the test cases in more business-related language.
+> The exemplified test case's MVC is not enough for the change to be merged, but it helps to build up the test logic. The reason is that we do not want to use locators directly in the tests, and tests **must** use [Page Objects](page_objects.md) before they can be merged. This way we better separate the responsibilities, where the Page Objects encapsulate elements and methods that allow us to interact with pages, while the spec files describe the test cases in more business-related language.
Below are the steps that the test covers:
@@ -294,7 +294,7 @@ In the `before` block we create all the application state needed for the tests t
> A project is created in the background by creating the `issue` resource.
>
-> When creating the [Resources], notice that when calling the `fabricate_via_api` method, we pass some attribute:values, like `title`, and `labels` for the `issue` resource; and `project` and `title` for the `label` resource.
+> When creating the [Resources](resources.md), notice that when calling the `fabricate_via_api` method, we pass some attribute:values, like `title`, and `labels` for the `issue` resource; and `project` and `title` for the `label` resource.
>
> What's important to understand here is that by creating the application state mostly using the public APIs we save a lot of time in the test suite setup stage.
>
@@ -358,7 +358,7 @@ To address point 1, we changed the test implementation from two `it` blocks into
**Note:** When writing this document, some code that is now merged to master was not implemented yet, but we left them here for the readers to understand the whole process of end-to-end test creation.
-You can think of [Resources] as anything that can be created on GitLab CE or EE, either through the GUI, the API, or the CLI.
+You can think of [Resources](resources.md) as anything that can be created on GitLab CE or EE, either through the GUI, the API, or the CLI.
With that in mind, resources can be a project, an epic, an issue, a label, a commit, etc.
@@ -468,7 +468,7 @@ Page Objects are used in end-to-end tests for maintenance reasons, where a page'
> Page Objects are auto-loaded in the [`qa/qa.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/qa/qa.rb) file and available in all the test files (`*_spec.rb`).
-Take a look at the [Page Objects] documentation.
+Take a look at the [Page Objects](page_objects.md) documentation.
Now, let's go back to our example.
@@ -571,7 +571,7 @@ The `text_of_labels_block` method is a simple method that returns the `:labels_b
#### Updates in the view (*.html.haml) and `dropdowns_helper.rb` files
-Now let's change the view and the `dropdowns_helper` files to add the selectors that relate to the [Page Objects].
+Now let's change the view and the `dropdowns_helper` files to add the selectors that relate to the [Page Objects](page_objects.md).
In [`app/views/shared/issuable/_sidebar.html.haml:105`](https://gitlab.com/gitlab-org/gitlab/blob/7ca12defc7a965987b162a6ebef302f95dc8867f/app/views/shared/issuable/_sidebar.html.haml#L105), add a `data: { qa_selector: 'edit_link_labels' }` data attribute.
@@ -619,6 +619,3 @@ This method receives an element (`name`) and the `keys` that it will send to tha
As you might remember, in the Issue Page Object we call this method like this: `send_keys_to_element(:dropdown_input_field, [label, :enter])`.
With that, you should be able to start writing end-to-end tests yourself. *Congratulations!*
-
-[Page Objects]: page_objects.md
-[Resources]: resources.md
diff --git a/doc/development/testing_guide/flaky_tests.md b/doc/development/testing_guide/flaky_tests.md
index a2fb5a2d4ce..1e53e92fad5 100644
--- a/doc/development/testing_guide/flaky_tests.md
+++ b/doc/development/testing_guide/flaky_tests.md
@@ -40,8 +40,8 @@ Quarantined tests are run on the CI in dedicated jobs that are allowed to fail:
## Automatic retries and flaky tests detection
-On our CI, we use [rspec-retry] to automatically retry a failing example a few
-times (see [`spec/spec_helper.rb`] for the precise retries count).
+On our CI, we use [rspec-retry](https://github.com/NoRedInk/rspec-retry) to automatically retry a failing example a few
+times (see [`spec/spec_helper.rb`](https://gitlab.com/gitlab-org/gitlab/blob/master/spec/spec_helper.rb) for the precise retries count).
We also use a home-made `RspecFlaky::Listener` listener which records flaky
examples in a JSON report file on `master` (`retrieve-tests-metadata` and
@@ -52,9 +52,6 @@ This was originally implemented in: <https://gitlab.com/gitlab-org/gitlab-foss/-
If you want to enable retries locally, you can use the `RETRIES` env variable.
For instance `RETRIES=1 bin/rspec ...` would retry the failing examples once.
-[rspec-retry]: https://github.com/NoRedInk/rspec-retry
-[`spec/spec_helper.rb`]: https://gitlab.com/gitlab-org/gitlab/blob/master/spec/spec_helper.rb
-
## Problems we had in the past at GitLab
- [`rspec-retry` is biting us when some API specs fail](https://gitlab.com/gitlab-org/gitlab-foss/issues/29242): <https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/9825>
diff --git a/doc/development/testing_guide/review_apps.md b/doc/development/testing_guide/review_apps.md
index c84a5466e03..efa58cbeae3 100644
--- a/doc/development/testing_guide/review_apps.md
+++ b/doc/development/testing_guide/review_apps.md
@@ -44,36 +44,36 @@ subgraph "CNG-mirror pipeline"
### Detailed explanation
-1. On every [pipeline][gitlab-pipeline] during the `test` stage, the
- [`gitlab:assets:compile`][gitlab:assets:compile pull-cache] job is automatically started.
- - Once it's done, it starts the [`review-build-cng`][review-build-cng]
- manual job since the [`CNG-mirror`][cng-mirror] pipeline triggered in the
+1. On every [pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/125315730) during the `test` stage, the
+ [`gitlab:assets:compile`](https://gitlab.com/gitlab-org/gitlab/-/jobs/467724487) job is automatically started.
+ - Once it's done, it starts the [`review-build-cng`](https://gitlab.com/gitlab-org/gitlab/-/jobs/467724808)
+ manual job since the [`CNG-mirror`](https://gitlab.com/gitlab-org/build/CNG-mirror) pipeline triggered in the
following step depends on it.
-1. The [`review-build-cng`][review-build-cng] job [triggers a pipeline][cng-mirror-pipeline]
- in the [`CNG-mirror`][cng-mirror] project.
- - The [`CNG-mirror`][cng-mirror-pipeline] pipeline creates the Docker images of
+1. The [`review-build-cng`](https://gitlab.com/gitlab-org/gitlab/-/jobs/467724808) job [triggers a pipeline](https://gitlab.com/gitlab-org/build/CNG-mirror/pipelines/44364657)
+ in the [`CNG-mirror`](https://gitlab.com/gitlab-org/build/CNG-mirror) project.
+ - The [`CNG-mirror`](https://gitlab.com/gitlab-org/build/CNG-mirror/pipelines/44364657) pipeline creates the Docker images of
each component (e.g. `gitlab-rails-ee`, `gitlab-shell`, `gitaly` etc.)
- based on the commit from the [GitLab pipeline][gitlab-pipeline] and stores
- them in its [registry][cng-mirror-registry].
- - We use the [`CNG-mirror`][cng-mirror] project so that the `CNG`, (**C**loud
+ based on the commit from the [GitLab pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/125315730) and stores
+ them in its [registry](https://gitlab.com/gitlab-org/build/CNG-mirror/container_registry).
+ - We use the [`CNG-mirror`](https://gitlab.com/gitlab-org/build/CNG-mirror) project so that the `CNG`, (**C**loud
**N**ative **G**itLab), project's registry is not overloaded with a
lot of transient Docker images.
- Note that the official CNG images are built by the `cloud-native-image`
- job, which runs only for tags, and triggers itself a [`CNG`][cng] pipeline.
-1. Once the `test` stage is done, the [`review-deploy`][review-deploy] job
- deploys the Review App using [the official GitLab Helm chart][helm-chart] to
- the [`review-apps-ce`][review-apps-ce] / [`review-apps-ee`][review-apps-ee]
+ job, which runs only for tags, and triggers itself a [`CNG`](https://gitlab.com/gitlab-org/build/CNG) pipeline.
+1. Once the `test` stage is done, the [`review-deploy`](https://gitlab.com/gitlab-org/gitlab/-/jobs/467724810) job
+ deploys the Review App using [the official GitLab Helm chart](https://gitlab.com/gitlab-org/charts/gitlab/) to
+ the [`review-apps-ce`](https://console.cloud.google.com/kubernetes/clusters/details/us-central1-a/review-apps-ce?project=gitlab-review-apps) / [`review-apps-ee`](https://console.cloud.google.com/kubernetes/clusters/details/us-central1-b/review-apps-ee?project=gitlab-review-apps)
Kubernetes cluster on GCP.
- The actual scripts used to deploy the Review App can be found at
- [`scripts/review_apps/review-apps.sh`][review-apps.sh].
+ [`scripts/review_apps/review-apps.sh`](https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/review_apps/review-apps.sh).
- These scripts are basically
- [our official Auto DevOps scripts][Auto-DevOps.gitlab-ci.yml] where the
+ [our official Auto DevOps scripts](https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml) where the
default CNG images are overridden with the images built and stored in the
- [`CNG-mirror` project's registry][cng-mirror-registry].
- - Since we're using [the official GitLab Helm chart][helm-chart], this means
+ [`CNG-mirror` project's registry](https://gitlab.com/gitlab-org/build/CNG-mirror/container_registry).
+ - Since we're using [the official GitLab Helm chart](https://gitlab.com/gitlab-org/charts/gitlab/), this means
you get a dedicated environment for your branch that's very close to what
it would look in production.
-1. Once the [`review-deploy`][review-deploy] job succeeds, you should be able to
+1. Once the [`review-deploy`](https://gitlab.com/gitlab-org/gitlab/-/jobs/467724810) job succeeds, you should be able to
use your Review App thanks to the direct link to it from the MR widget. To log
into the Review App, see "Log into my Review App?" below.
@@ -95,7 +95,7 @@ subgraph "CNG-mirror pipeline"
stop a Review App manually, and is also started by GitLab once a merge
request's branch is deleted after being merged.
- The Kubernetes cluster is connected to the `gitlab-{ce,ee}` projects using
- [GitLab's Kubernetes integration][gitlab-k8s-integration]. This basically
+ [GitLab's Kubernetes integration](../../user/project/clusters/index.md). This basically
allows to have a link to the Review App directly from the merge request
widget.
@@ -119,7 +119,7 @@ that were not removed along with the Kubernetes resources.
## QA runs
-On every [pipeline][gitlab-pipeline] in the `qa` stage (which comes after the
+On every [pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/125315730) in the `qa` stage (which comes after the
`review` stage), the `review-qa-smoke` job is automatically started and it runs
the QA smoke suite.
@@ -127,7 +127,7 @@ You can also manually start the `review-qa-all`: it runs the full QA suite.
## Performance Metrics
-On every [pipeline][gitlab-pipeline] in the `qa` stage, the
+On every [pipeline](https://gitlab.com/gitlab-org/gitlab/pipelines/125315730) in the `qa` stage, the
`review-performance` job is automatically started: this job does basic
browser performance testing using a
[Sitespeed.io Container](../../user/project/merge_requests/browser_performance_testing.md).
@@ -287,7 +287,7 @@ kubectl get cm --sort-by='{.metadata.creationTimestamp}' | grep 'review-' | grep
### Using K9s
-[K9s] is a powerful command line dashboard which allows you to filter by labels. This can help identify trends with apps exceeding the [review-app resource requests](https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/review_apps/base-config.yaml). Kubernetes will schedule pods to nodes based on resource requests and allow for CPU usage up to the limits.
+[K9s](https://github.com/derailed/k9s) is a powerful command line dashboard which allows you to filter by labels. This can help identify trends with apps exceeding the [review-app resource requests](https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/review_apps/base-config.yaml). Kubernetes will schedule pods to nodes based on resource requests and allow for CPU usage up to the limits.
- In K9s you can sort or add filters by typing the `/` character
- `-lrelease=<review-app-slug>` - filters down to all pods for a release. This aids in determining what is having issues in a single deployment
@@ -389,27 +389,9 @@ find a way to limit it to only us.**
### Helpful command line tools
-- [K9s] - enables CLI dashboard across pods and enabling filtering by labels
+- [K9s](https://github.com/derailed/k9s) - enables CLI dashboard across pods and enabling filtering by labels
- [Stern](https://github.com/wercker/stern) - enables cross pod log tailing based on label/field selectors
-[charts-1068]: https://gitlab.com/gitlab-org/charts/gitlab/issues/1068
-[gitlab-pipeline]: https://gitlab.com/gitlab-org/gitlab/pipelines/125315730
-[gitlab:assets:compile pull-cache]: https://gitlab.com/gitlab-org/gitlab/-/jobs/467724487
-[review-build-cng]: https://gitlab.com/gitlab-org/gitlab/-/jobs/467724808
-[review-deploy]: https://gitlab.com/gitlab-org/gitlab/-/jobs/467724810
-[cng-mirror]: https://gitlab.com/gitlab-org/build/CNG-mirror
-[cng]: https://gitlab.com/gitlab-org/build/CNG
-[cng-mirror-pipeline]: https://gitlab.com/gitlab-org/build/CNG-mirror/pipelines/44364657
-[cng-mirror-registry]: https://gitlab.com/gitlab-org/build/CNG-mirror/container_registry
-[helm-chart]: https://gitlab.com/gitlab-org/charts/gitlab/
-[review-apps-ce]: https://console.cloud.google.com/kubernetes/clusters/details/us-central1-a/review-apps-ce?project=gitlab-review-apps
-[review-apps-ee]: https://console.cloud.google.com/kubernetes/clusters/details/us-central1-b/review-apps-ee?project=gitlab-review-apps
-[review-apps.sh]: https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/review_apps/review-apps.sh
-[automated_cleanup.rb]: https://gitlab.com/gitlab-org/gitlab/-/blob/master/scripts/review_apps/automated_cleanup.rb
-[Auto-DevOps.gitlab-ci.yml]: https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Auto-DevOps.gitlab-ci.yml
-[gitlab-k8s-integration]: ../../user/project/clusters/index.md
-[K9s]: https://github.com/derailed/k9s
-
---
[Return to Testing documentation](index.md)
diff --git a/doc/gitlab-basics/create-project.md b/doc/gitlab-basics/create-project.md
index 34e3ff7a6fa..1febe8337bc 100644
--- a/doc/gitlab-basics/create-project.md
+++ b/doc/gitlab-basics/create-project.md
@@ -80,10 +80,26 @@ To use a built-in template on the **New project** page:
1. Finish creating the project by filling out the project's details. The process is
the same as creating a [blank project](#blank-projects).
+##### Enterprise templates **(ULTIMATE)**
+
+GitLab is developing Enterprise templates to help you streamline audit management with selected regulatory standards. These templates automatically import issues that correspond to each regulatory requirement.
+
+To create a new project with an Enterprise template, on the **New project** page:
+
+1. On the **Create from template** tab, select the **Built-in** tab.
+1. From the list of available built-in Enterprise templates, click the:
+ - **Preview** button to look at the template source itself.
+ - **Use template** button to start creating the project.
+1. Finish creating the project by filling out the project's details. The process is the same as creating a [blank project](#blank-projects).
+
+Available Enterprise templates include:
+
+- HIPAA Audit Protocol template ([introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/13756) in GitLab 12.10)
+
TIP: **Tip:**
You can improve the existing built-in templates or contribute new ones in the
[`project-templates`](https://gitlab.com/gitlab-org/project-templates) and
-[`pages`](https://gitlab.com/pages) groups.
+[`pages`](https://gitlab.com/pages) groups by following [these steps](https://gitlab.com/gitlab-org/project-templates/contributing).
#### Custom project templates **(PREMIUM)**
diff --git a/doc/integration/elasticsearch.md b/doc/integration/elasticsearch.md
index 48f39ea4bc9..fcd1c03a556 100644
--- a/doc/integration/elasticsearch.md
+++ b/doc/integration/elasticsearch.md
@@ -361,7 +361,7 @@ or creating [extra Sidekiq processes](../administration/operations/extra_sidekiq
1. Enable replication and refreshing again after indexing (only if you previously disabled it):
```shell
- curl --request PUT localhost:9200/gitlab-production/_settings --header 'Content-Type: application/json' ---data '{
+ curl --request PUT localhost:9200/gitlab-production/_settings --header 'Content-Type: application/json' --data '{
"index" : {
"number_of_replicas" : 1,
"refresh_interval" : "1s"
@@ -373,7 +373,7 @@ or creating [extra Sidekiq processes](../administration/operations/extra_sidekiq
For Elasticsearch 6.x, the index should be in read-only mode before proceeding with the force merge:
```shell
- curl --request PUT localhost:9200/gitlab-production/_settings ---header 'Content-Type: application/json' --data '{
+ curl --request PUT localhost:9200/gitlab-production/_settings --header 'Content-Type: application/json' --data '{
"settings": {
"index.blocks.write": true
} }'
@@ -388,7 +388,7 @@ or creating [extra Sidekiq processes](../administration/operations/extra_sidekiq
After this, if your index is in read-only mode, switch back to read-write:
```shell
- curl --request PUT localhost:9200/gitlab-production/_settings ---header 'Content-Type: application/json' --data '{
+ curl --request PUT localhost:9200/gitlab-production/_settings --header 'Content-Type: application/json' --data '{
"settings": {
"index.blocks.write": false
} }'
@@ -634,6 +634,14 @@ Here are some common pitfalls and how to overcome them:
You probably have not used either `http://` or `https://` as part of your value in the **"URL"** field of the Elasticsearch Integration Menu. Please make sure you are using either `http://` or `https://` in this field, as the [Elasticsearch client for Go](https://github.com/olivere/elastic) that we are using [needs the prefix for the URL to be accepted as valid](https://github.com/olivere/elastic/commit/a80af35aa41856dc2c986204e2b64eab81ccac3a).
Once you have corrected the formatting of the URL, delete the index (via the [dedicated Rake task](#gitlab-elasticsearch-rake-tasks)) and [reindex the content of your instance](#adding-gitlabs-data-to-the-elasticsearch-index).
+### Known Issues
+
+- **[Elasticsearch `code_analyzer` doesn't account for all code cases](https://gitlab.com/gitlab-org/gitlab/issues/10693)**
+
+ The `code_analyzer` pattern and filter configuration is being evaluated for improvement. We have noticed [several edge cases](https://gitlab.com/gitlab-org/gitlab/-/issues/10693#note_158382332) that are not returning expected search results due to our pattern and filter configuration.
+
+  An improved strategy for the `code_analyzer` pattern and filters is being discussed in [issue 29443](https://gitlab.com/gitlab-org/gitlab/-/issues/29443).
+
### Reverting to basic search
Sometimes there may be issues with your Elasticsearch index data and as such
diff --git a/doc/subscriptions/index.md b/doc/subscriptions/index.md
index e2868d648de..12d5aa1e29f 100644
--- a/doc/subscriptions/index.md
+++ b/doc/subscriptions/index.md
@@ -75,6 +75,10 @@ count as active users in the subscription period in which they were originally a
- Members with Guest permissions on an Ultimate subscription.
- GitLab-created service accounts: `Ghost User` and `Support Bot`.
+##### User Statistics
+
+A breakdown of the users within your instance, including active, billable, and blocked users, can be found by navigating to **Admin Area > Overview > Dashboard** and selecting the `Users Statistics` button within the `Users` widget.
+
NOTE: **Note:**
If you have LDAP integration enabled, anyone in the configured domain can sign up for a GitLab account. This can result in an unexpected bill at time of renewal. Consider [disabling new signups](../user/admin_area/settings/sign_up_restrictions.md) and managing new users manually instead.
@@ -444,7 +448,7 @@ of the group/namespace. You can [purchase additional CI minutes](#purchasing-add
If you're using GitLab.com, you can purchase additional CI minutes so your
pipelines won't be blocked after you have used all your CI minutes from your
-main quota. Additional minutes:
+main quota. You can find pricing for additional CI/CD minutes in the [GitLab Customers Portal](https://customers.gitlab.com/plans). Additional minutes:
- Are only used once the shared quota included in your subscription runs out.
- Roll over month to month.
diff --git a/doc/topics/autodevops/stages.md b/doc/topics/autodevops/stages.md
index 265a117ced1..72fa3870abd 100644
--- a/doc/topics/autodevops/stages.md
+++ b/doc/topics/autodevops/stages.md
@@ -533,6 +533,11 @@ The metrics include:
- **Response Metrics:** latency, throughput, error rate
- **System Metrics:** CPU utilization, memory utilization
+GitLab provides some initial alerts for you after you install Prometheus:
+
+- Ingress status code `500` > 0.1%
+- NGINX status code `500` > 0.1%
+
To make use of Auto Monitoring:
1. [Install and configure the requirements](index.md#requirements).
diff --git a/doc/university/README.md b/doc/university/README.md
index 5c51dcc87a2..84e3b84139b 100644
--- a/doc/university/README.md
+++ b/doc/university/README.md
@@ -7,7 +7,7 @@ type: index
GitLab University is a great place to start when learning about version control with Git and GitLab, as well as other GitLab features.
-If you're looking for a GitLab subscription for _your university_, see our [Education](https://about.gitlab.com/solutions/education/) page.
+If you're looking for a GitLab subscription for _your university_, see our [GitLab for Education](https://about.gitlab.com/solutions/education/) page.
CAUTION: **Caution:**
Some of the content in GitLab University may be out of date and we plan to
diff --git a/doc/user/analytics/value_stream_analytics.md b/doc/user/analytics/value_stream_analytics.md
index 22af788b6f5..703b794981f 100644
--- a/doc/user/analytics/value_stream_analytics.md
+++ b/doc/user/analytics/value_stream_analytics.md
@@ -302,6 +302,14 @@ For Value Stream Analytics functionality introduced in GitLab 12.3 and later:
- Features are available only on
[Premium or Silver tiers](https://about.gitlab.com/pricing/) and above.
+## Troubleshooting
+
+If you see an error as listed in the following table, try the noted solution:
+
+| Error | Solution |
+|---------------------------------------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| There was an error fetching the top labels. | Manually enable tasks by type feature in the [rails console](../../administration/troubleshooting/navigating_gitlab_via_rails_console.md#starting-a-rails-console-session), specifically `Feature.enable(:tasks_by_type_chart)`. |
+
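+As a minimal sketch of the solution above, assuming an Omnibus installation where
+the `gitlab-rails` command is available on the host, the feature can also be enabled
+without opening an interactive console:
+
+```shell
+# Enable the tasks-by-type chart feature flag non-interactively
+sudo gitlab-rails runner "Feature.enable(:tasks_by_type_chart)"
+```
+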
## More resources
Learn more about Value Stream Analytics in the following resources:
diff --git a/doc/user/application_security/container_scanning/index.md b/doc/user/application_security/container_scanning/index.md
index 3d6c9e0b0ba..27b22fb925c 100644
--- a/doc/user/application_security/container_scanning/index.md
+++ b/doc/user/application_security/container_scanning/index.md
@@ -67,10 +67,10 @@ To enable Container Scanning in your pipeline, you need:
```yaml
build:
- image: docker:19.03.1
+ image: docker:19.03.8
stage: build
services:
- - docker:19.03.1-dind
+ - docker:19.03.8-dind
variables:
IMAGE_TAG: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA
script:
@@ -118,7 +118,7 @@ variables:
DOCKER_DRIVER: overlay2
services:
- - docker:19.03.5-dind
+ - docker:19.03.8-dind
stages:
- build
@@ -158,9 +158,9 @@ variables:
The `CLAIR_OUTPUT` variable defined in the main `gitlab-ci.yml` will overwrite what's
defined in `Container-Scanning.gitlab-ci.yml`, changing the Container Scanning behavior.
-[//]: # "NOTE: The container scanning tool references the following heading in the code, so if you"
-[//]: # " make a change to this heading, make sure to update the documentation URLs used in the"
-[//]: # " container scanning tool (https://gitlab.com/gitlab-org/security-products/analyzers/klar)"
+<!-- NOTE: The container scanning tool references the following heading in the code, so if you
+     make a change to this heading, make sure to update the documentation URLs used in the
+     container scanning tool (https://gitlab.com/gitlab-org/security-products/analyzers/klar) -->
#### Available variables
@@ -240,7 +240,7 @@ It may be worthwhile to set up a [scheduled pipeline](../../../ci/pipelines/sche
image: docker:stable
services:
- - docker:19.03.5-dind
+ - docker:19.03.8-dind
stages:
- build
diff --git a/doc/user/application_security/dast/index.md b/doc/user/application_security/dast/index.md
index c65d6adcff6..c9c7129dd7b 100644
--- a/doc/user/application_security/dast/index.md
+++ b/doc/user/application_security/dast/index.md
@@ -148,6 +148,9 @@ The results will be saved as a
that you can later download and analyze.
Due to implementation limitations, we always take the latest DAST artifact available.
+DANGER: **Danger:**
+**DO NOT** run an authenticated scan against a production server. When an authenticated scan is run, it may perform *any* function that the authenticated user can. This includes modifying and deleting data, submitting forms, following links, and so on. Only run an authenticated scan against a test server.
+
### Full scan
DAST can be configured to perform [ZAP Full Scan](https://github.com/zaproxy/zaproxy/wiki/ZAP-Full-Scan), which
@@ -463,10 +466,41 @@ The DAST job does not require the project's repository to be present when runnin
## Running DAST in an offline environment
-DAST can be executed on an offline GitLab Ultimate installation by using the following process:
+For self-managed GitLab instances in an environment with limited, restricted, or intermittent access
+to external resources through the internet, some adjustments are required for the DAST job to
+successfully run. For more information, see [Offline environments](../offline_deployments/index.md).
+
+### Requirements for offline DAST support
+
+To use DAST in an offline environment, you need:
+
+- GitLab Runner with the [`docker` or `kubernetes` executor](#requirements).
+- Docker Container Registry with a locally available copy of the DAST [container image](https://gitlab.com/gitlab-org/security-products/dast), found in the [DAST container registry](https://gitlab.com/gitlab-org/security-products/dast/container_registry).
+
+NOTE: **Note:**
+GitLab Runner has a [default `pull policy` of `always`](https://docs.gitlab.com/runner/executors/docker.html#using-the-always-pull-policy),
+meaning the runner may try to pull remote images even if a local copy is available. Set GitLab
+Runner's [`pull_policy` to `if-not-present`](https://docs.gitlab.com/runner/executors/docker.html#using-the-if-not-present-pull-policy)
+in an offline environment if you prefer using only locally available Docker images.
+
+### Make GitLab DAST analyzer images available inside your Docker registry
+
+For DAST, import the following default DAST analyzer image from `registry.gitlab.com` to your local "offline"
+registry:
+
+- `registry.gitlab.com/gitlab-org/security-products/dast:latest`
+
+The process for importing Docker images into a local offline Docker registry depends on
+**your network security policy**. Please consult your IT staff to find an accepted and approved
+process by which external resources can be imported or temporarily accessed. Note that these scanners are [updated periodically](../index.md#maintenance-and-update-of-the-vulnerabilities-database)
+with new definitions, so consider whether you can make periodic updates yourself.
+
+For details on saving and transporting Docker images as a file, see Docker's documentation on
+[`docker save`](https://docs.docker.com/engine/reference/commandline/save/), [`docker load`](https://docs.docker.com/engine/reference/commandline/load/),
+[`docker export`](https://docs.docker.com/engine/reference/commandline/export/), and [`docker import`](https://docs.docker.com/engine/reference/commandline/import/).
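+
+As a rough sketch of one such workflow (the target registry host is hypothetical; adapt
+it to your approved process):
+
+```shell
+# On a host with internet access: pull the analyzer image and save it to a file
+docker pull registry.gitlab.com/gitlab-org/security-products/dast:latest
+docker save registry.gitlab.com/gitlab-org/security-products/dast:latest --output dast.tar
+
+# After transferring dast.tar into the offline network: load, re-tag, and push
+docker load --input dast.tar
+docker tag registry.gitlab.com/gitlab-org/security-products/dast:latest \
+  registry.example.com/gitlab-org/security-products/dast:latest
+docker push registry.example.com/gitlab-org/security-products/dast:latest
+```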
+
+### Set DAST CI job variables to use local DAST analyzers
-1. Host the DAST image `registry.gitlab.com/gitlab-org/security-products/dast:latest` in your local
- Docker container registry.
1. Add the following configuration to your `.gitlab-ci.yml` file. You must change the `image` value to refer
to the DAST Docker image hosted on your local Docker container registry:
diff --git a/doc/user/application_security/dependency_scanning/index.md b/doc/user/application_security/dependency_scanning/index.md
index 781bda47a43..ae006178945 100644
--- a/doc/user/application_security/dependency_scanning/index.md
+++ b/doc/user/application_security/dependency_scanning/index.md
@@ -46,7 +46,7 @@ this is enabled by default.
CAUTION: **Caution:**
If you use your own Runners, make sure that the Docker version you have installed
-is **not** `19.03.00`. See [troubleshooting information](#error-response-from-daemon-error-processing-tar-file-docker-tar-relocation-error) for details.
+is **not** `19.03.0`. See [troubleshooting information](#error-response-from-daemon-error-processing-tar-file-docker-tar-relocation-error) for details.
Privileged mode is not necessary if you've [disabled Docker in Docker for Dependency Scanning](#disabling-docker-in-docker-for-dependency-scanning)
@@ -178,8 +178,9 @@ The following variables are used for configuring specific analyzers (used for a
| `BUNDLER_AUDIT_UPDATE_DISABLED` | `bundler-audit` | `"false"` | Disable automatic updates for the `bundler-audit` analyzer. Useful if you're running Dependency Scanning in an offline, air-gapped environment.|
| `BUNDLER_AUDIT_ADVISORY_DB_URL` | `bundler-audit` | `https://github.com/rubysec/ruby-advisory-db` | URL of the advisory database used by bundler-audit. |
| `BUNDLER_AUDIT_ADVISORY_DB_REF_NAME` | `bundler-audit` | `master` | Git ref for the advisory database specified by `BUNDLER_AUDIT_ADVISORY_DB_URL`. |
-| `RETIREJS_JS_ADVISORY_DB` | `retire.js` | `https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/jsrepository.json` | Path or URL to Retire.js js vulnerability data file. |
-| `RETIREJS_NODE_ADVISORY_DB` | `retire.js` | `https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/npmrepository.json` | Path or URL to Retire.js node vulnerability data file. |
+| `RETIREJS_JS_ADVISORY_DB` | `retire.js` | `https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/jsrepository.json` | Path or URL to `retire.js` JS vulnerability data file. Note that if the URL hosting the data file uses a custom SSL certificate, for example in an offline installation, you can pass the certificate in the `ADDITIONAL_CA_CERT_BUNDLE` environment variable. |
+| `RETIREJS_NODE_ADVISORY_DB` | `retire.js` | `https://raw.githubusercontent.com/RetireJS/retire.js/master/repository/npmrepository.json` | Path or URL to `retire.js` node vulnerability data file. Note that if the URL hosting the data file uses a custom SSL certificate, for example in an offline installation, you can pass the certificate in the `ADDITIONAL_CA_CERT_BUNDLE` environment variable. |
+| `RETIREJS_ADVISORY_DB_INSECURE` | `retire.js` | `false` | Enable fetching remote JS and Node vulnerability data files (defined by the `RETIREJS_JS_ADVISORY_DB` and `RETIREJS_NODE_ADVISORY_DB` variables) from hosts using an insecure or self-signed SSL (TLS) certificate. |
### Using private Maven repos
@@ -418,7 +419,7 @@ You can also [submit new vulnerabilities](https://gitlab.com/gitlab-org/security
### Error response from daemon: error processing tar file: docker-tar: relocation error
-This error occurs when the Docker version used to run the SAST job is `19.03.00`.
-You are advised to update to Docker `19.03.01` or greater. Older versions are not
+This error occurs when the Docker version used to run the SAST job is `19.03.0`.
+You are advised to update to Docker `19.03.1` or greater. Older versions are not
affected. Read more in
[this issue](https://gitlab.com/gitlab-org/gitlab/issues/13830#note_211354992 "Current SAST container fails").
diff --git a/doc/user/application_security/sast/index.md b/doc/user/application_security/sast/index.md
index 75afdfb5cf5..011f95c7049 100644
--- a/doc/user/application_security/sast/index.md
+++ b/doc/user/application_security/sast/index.md
@@ -58,7 +58,7 @@ CAUTION: **Caution:** Our SAST jobs currently expect a Linux container type. Win
CAUTION: **Caution:**
If you use your own Runners, make sure that the Docker version you have installed
-is **not** `19.03.00`. See [troubleshooting information](#error-response-from-daemon-error-processing-tar-file-docker-tar-relocation-error) for details.
+is **not** `19.03.0`. See [troubleshooting information](#error-response-from-daemon-error-processing-tar-file-docker-tar-relocation-error) for details.
## Supported languages and frameworks
@@ -582,7 +582,7 @@ security reports without requiring internet access.
### Error response from daemon: error processing tar file: docker-tar: relocation error
-This error occurs when the Docker version used to run the SAST job is `19.03.00`.
-You are advised to update to Docker `19.03.01` or greater. Older versions are not
+This error occurs when the Docker version used to run the SAST job is `19.03.0`.
+You are advised to update to Docker `19.03.1` or greater. Older versions are not
affected. Read more in
[this issue](https://gitlab.com/gitlab-org/gitlab/issues/13830#note_211354992 "Current SAST container fails").
diff --git a/doc/user/clusters/applications.md b/doc/user/clusters/applications.md
index f8fd07276c5..cc7b5dcd5fb 100644
--- a/doc/user/clusters/applications.md
+++ b/doc/user/clusters/applications.md
@@ -43,6 +43,7 @@ The following applications can be installed:
- [Knative](#knative)
- [Crossplane](#crossplane)
- [Elastic Stack](#elastic-stack)
+- [Fluentd](#fluentd)
With the exception of Knative, the applications will be installed in a dedicated
namespace called `gitlab-managed-apps`.
@@ -297,6 +298,22 @@ Ingress with the recent changes.
![Disabling WAF](../../topics/web_application_firewall/img/guide_waf_ingress_save_changes_v12_10.png)
+##### Logging and blocking modes
+
+To help you tune your WAF rules, you can globally set your WAF to either
+**Logging** or **Blocking** mode:
+
+- **Logging mode** - Allows traffic matching the rule to pass, and logs the event.
+- **Blocking mode** - Prevents traffic matching the rule from passing, and logs the event.
+
+To change your WAF's mode:
+
+1. [Install ModSecurity](../../topics/web_application_firewall/quick_start_guide.md) if you have not already done so.
+1. Navigate to **{cloud-gear}** **Operations > Kubernetes**.
+1. In **Applications**, scroll to **Ingress**.
+1. Under **Global default**, select your desired mode.
+1. Click **Save changes**.
+
##### Viewing Web Application Firewall traffic
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/14707) in [GitLab Ultimate](https://about.gitlab.com/pricing/) 12.9.
@@ -523,6 +540,28 @@ kubectl port-forward svc/kibana 5601:443
Then, you can visit Kibana at `http://localhost:5601`.
+### Fluentd
+
+> Introduced in GitLab 12.10 for project- and group-level clusters.
+
+[Fluentd](https://www.fluentd.org/) is an open source data collector, which enables
+you to unify data collection and consumption for better use and understanding of
+your data. Fluentd sends logs in syslog format.
+
+To enable Fluentd:
+
+1. Navigate to **{cloud-gear}** **Operations > Kubernetes** and click
+ **Applications**. You will be prompted to enter a host, port and protocol
+   where the WAF logs will be sent via syslog.
+1. Provide the host domain name or URL in **SIEM URL or Host**.
+1. Provide the host port number in **SIEM Port**.
+1. Select a **SIEM Protocol**.
+1. Check **Send ModSecurity Logs**. If you do not select this checkbox, the **Install**
+ button is disabled.
+1. Click **Install**.
+
+![Fluentd input fields](img/fluentd_v12_10.png)
+
### Future apps
Interested in contributing a new GitLab managed app? Visit the
@@ -552,6 +591,7 @@ Supported applications:
- [JupyterHub](#install-jupyterhub-using-gitlab-cicd)
- [Elastic Stack](#install-elastic-stack-using-gitlab-cicd)
- [Crossplane](#install-crossplane-using-gitlab-cicd)
+- [Fluentd](#install-fluentd-using-gitlab-cicd)
### Usage
@@ -1036,6 +1076,30 @@ management project. Refer to the
[chart](https://github.com/crossplane/crossplane/tree/master/cluster/charts/crossplane#configuration) for the
available configuration options. Note that this link points to the docs for the current development release, which may differ from the version you have installed. You can check out a specific version in the branch/tag switcher.
+### Install Fluentd using GitLab CI/CD
+
+> [Introduced](https://gitlab.com/gitlab-org/cluster-integration/cluster-applications/-/merge_requests/76) in GitLab 12.10.
+
+To install Fluentd into the `gitlab-managed-apps` namespace of your cluster using GitLab CI/CD, define the following configuration in `.gitlab/managed-apps/config.yaml`:
+
+```yaml
+Fluentd:
+ installed: true
+```
+
+You can also review the default values set for this chart in the [values.yaml](https://github.com/helm/charts/blob/master/stable/fluentd/values.yaml) file.
+
+You can customize the installation of Fluentd by defining
+`.gitlab/managed-apps/fluentd/values.yaml` file in your cluster management
+project. Refer to the
+[configuration chart for the current development release of Fluentd](https://github.com/helm/charts/tree/master/stable/fluentd#configuration)
+for the available configuration options.
+
+NOTE: **Note:**
+The configuration chart link points to the current development release, which
+may differ from the version you have installed. To ensure compatibility, switch
+to the specific branch or tag you are using.
+
## Upgrading applications
> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/24789) in GitLab 11.8.
diff --git a/doc/user/clusters/img/fluentd_v12_10.png b/doc/user/clusters/img/fluentd_v12_10.png
new file mode 100644
index 00000000000..7593f99ab51
--- /dev/null
+++ b/doc/user/clusters/img/fluentd_v12_10.png
Binary files differ
diff --git a/doc/user/group/saml_sso/index.md b/doc/user/group/saml_sso/index.md
index 4fcb5064c8c..2b4170d21af 100644
--- a/doc/user/group/saml_sso/index.md
+++ b/doc/user/group/saml_sso/index.md
@@ -42,7 +42,8 @@ GitLab.com uses the SAML NameID to identify users. The NameID element:
- Is case sensitive. The NameID must match exactly on subsequent login attempts, so should not rely on user input that could change between upper and lower case.
- Should not be an email address or username. We strongly recommend against these as it is hard to guarantee they will never change, for example when a person's name changes. Email addresses are also case-insensitive, which can result in users being unable to sign in.
-The recommended field for supported providers are in the [provider specific notes](#providers).
+The relevant field name and recommended value for supported providers are in the [provider specific notes](#providers).
CAUTION: **Warning:**
Once users have signed into GitLab using the SSO SAML setup, changing the `NameID` will break the configuration and potentially lock users out of the GitLab group.
@@ -407,11 +408,13 @@ If you do not wish to use that GitLab user with the SAML login, you can [unlink
### Message: "SAML authentication failed: User has already been taken"
-The user you are signed in with already has SAML linked to a different identity. This might mean you've attempted to link multiple SAML identities to the same user for a given Identity Provider. This could also be a symptom of the Identity Provider returning an inconsistent [NameID](#nameid).
+The user that you're signed in with already has SAML linked to a different identity.
+Here are possible causes and solutions:
-To change which identity you sign in with, you can [unlink the previous SAML identity](#unlinking-accounts) from this GitLab account.
-
-Alternatively, an admin of your Identity Provider can use the [SCIM API](../../../api/scim.md) to update your `extern_uid` to match the current **NameID**.
+| Cause | Solution |
+|------------------------------------------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| You've tried to link multiple SAML identities to the same user, for a given Identity Provider. | Change the identity that you sign in with. To do so, [unlink the previous SAML identity](#unlinking-accounts) from this GitLab account before attempting to sign in again. |
+| The Identity Provider might be returning an inconsistent [NameID](#nameid). | Ask an admin of your Identity Provider to use the [SCIM API](../../../api/scim.md) to update your `extern_uid` to match the current **NameID**. |
### Message: "SAML authentication failed: Email has already been taken"
@@ -427,13 +430,13 @@ This can be prevented by configuring the [NameID](#nameid) to return a consisten
### The NameID has changed
-As mentioned in the [NameID](#nameid) section, if the NameID changes for any user, the user can be locked out. This is common for setups using an email address as the identifier.
-
-To fix the issue, follow the steps outlined in the ["SAML authentication failed: User has already been taken"](#message-saml-authentication-failed-user-has-already-been-taken) section. We recommend using the API method if many users are affected so that the changes can be done in a scripted batch.
+| Cause | Solution |
+|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
+| As mentioned in the [NameID](#nameid) section, if the NameID changes for any user, the user can be locked out. This is a common problem when an email address is used as the identifier. | Follow the steps outlined in the ["SAML authentication failed: User has already been taken"](#message-saml-authentication-failed-user-has-already-been-taken) section. If many users are affected, we recommend that you use the appropriate API. |
### I need to change my SAML app
-Users need to [unlink the previous SAML identity](#unlinking-accounts) and [link their identity](#user-access-and-management) using the new SAML app.
+Users will need to [unlink the current SAML identity](#unlinking-accounts) and [link their identity](#user-access-and-management) to the new SAML app.
### My identity provider isn't listed
diff --git a/doc/user/group/saml_sso/scim_setup.md b/doc/user/group/saml_sso/scim_setup.md
index 42bc52a9201..e333fd19c1b 100644
--- a/doc/user/group/saml_sso/scim_setup.md
+++ b/doc/user/group/saml_sso/scim_setup.md
@@ -167,7 +167,10 @@ As a workaround, try an alternate mapping:
### Message: "SAML authentication failed: Email has already been taken"
-It is expected for the app's logs to show this error for any existing user until they sign in for the first time. GitLab will not allow multiple accounts to have the same email address.
+This message may be caused by the following:
+
+- Existing users have not yet signed into the new app.
+- The identity provider attempts to create a new user account in GitLab with an email address that already exists in GitLab.com.
### How do I diagnose why a user is unable to sign in
@@ -197,15 +200,17 @@ Whether the value was changed or you need to map to a different field, ensure `i
If GitLab's `externalId` doesn't match the SAML NameId, it will need to be updated in order for the user to log in. Ideally your identity provider will be configured to do such an update, but in some cases it may be unable to do so, such as when looking up a user fails due to an ID change.
-Fixing the fields your SCIM identity provider sends as `id` and `externalId` can correct this, however we use these IDs to look up users so if the identity provider is unaware of the current values for these it may try to create new duplicate users instead.
-
-If the `externalId` we have stored for a user has an incorrect value that doesn't match the SAML NameId, then it can be corrected ine on or two ways.
-
-One option is to have users can be delinked and relink following details in the ["SAML authentication failed: User has already been taken"](./index.md#message-saml-authentication-failed-user-has-already-been-taken) section. Additionally, to unlink all users at once, remove all users from the SAML app while SCIM is still turned on.
+Be cautious if you revise the fields used by your SCIM identity provider, typically `id` and `externalId`.
+We use these IDs to look up users. If the identity provider does not know the current values for these fields,
+that provider may create duplicate users.
-Another option is with the manual use of the SCIM API.
+If the `externalId` for a user is not correct, and also doesn't match the SAML NameID,
+you can address the problem in the following ways:
-The [SCIM API](../../../api/scim.md#update-a-single-saml-user) can be used to manually correct the `externalId` stored for users so that it matches the SAML NameId. You'll need to know the desired value that matches the `NameId` as well as the current `externalId` to look up the user.
+- You can have users unlink and relink themselves, based on the ["SAML authentication failed: User has already been taken"](./index.md#message-saml-authentication-failed-user-has-already-been-taken) section.
+- You can unlink all users simultaneously, by removing all users from the SAML app while provisioning is turned on.
+- You can use the [SCIM API](../../../api/scim.md#update-a-single-saml-user) to manually correct the `externalId` stored for users to match the SAML `NameId`.
+ To look up a user, you'll need to know the desired value that matches the `NameId` as well as the current `externalId`.
It is then possible to issue a manual SCIM#update request, for example:
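+
+A rough, hypothetical sketch of such a request follows; verify the exact endpoint and
+payload against the [SCIM API](../../../api/scim.md#update-a-single-saml-user) documentation
+before using it:
+
+```shell
+# Placeholders: group path, SCIM token, current externalId, and the desired new value
+curl --request PATCH "https://gitlab.com/api/scim/v2/groups/<group_path>/Users/<current_extern_uid>" \
+  --header "Authorization: Bearer <your_scim_token>" \
+  --header "Content-Type: application/scim+json" \
+  --data '{ "Operations": [{ "op": "Replace", "path": "externalId", "value": "<new_extern_uid>" }] }'
+```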
diff --git a/doc/user/packages/container_registry/index.md b/doc/user/packages/container_registry/index.md
index d6c6767a8fd..5505a4503ca 100644
--- a/doc/user/packages/container_registry/index.md
+++ b/doc/user/packages/container_registry/index.md
@@ -240,10 +240,10 @@ should look similar to this:
```yaml
build:
- image: docker:19.03.1
+ image: docker:19.03.8
stage: build
services:
- - docker:19.03.1-dind
+ - docker:19.03.8-dind
script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker build -t $CI_REGISTRY/group/project/image:latest .
@@ -254,10 +254,10 @@ You can also make use of [other variables](../../../ci/variables/README.md) to a
```yaml
build:
- image: docker:19.03.1
+ image: docker:19.03.8
stage: build
services:
- - docker:19.03.1-dind
+ - docker:19.03.8-dind
variables:
IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
script:
@@ -280,9 +280,9 @@ when needed. Changes to `master` also get tagged as `latest` and deployed using
an application-specific deploy script:
```yaml
-image: docker:19.03.1
+image: docker:19.03.8
services:
- - docker:19.03.1-dind
+ - docker:19.03.8-dind
stages:
- build
@@ -355,9 +355,9 @@ Below is an example of what your `.gitlab-ci.yml` should look like:
```yaml
build:
- image: $CI_REGISTRY/group/project/docker:19.03.1
+ image: $CI_REGISTRY/group/project/docker:19.03.8
services:
- - name: $CI_REGISTRY/group/project/docker:19.03.1-dind
+ - name: $CI_REGISTRY/group/project/docker:19.03.8-dind
alias: docker
stage: build
script:
@@ -365,7 +365,7 @@ Below is an example of what your `.gitlab-ci.yml` should look like:
- docker run my-docker-image /script/to/run/tests
```
-If you forget to set the service alias, the `docker:19.03.1` image won't find the
+If you forget to set the service alias, the `docker:19.03.8` image won't find the
`dind` service, and an error like the following will be thrown:
```plaintext
@@ -435,10 +435,10 @@ stages:
- clean
build_image:
- image: docker:19.03.1
+ image: docker:19.03.8
stage: build
services:
- - docker:19.03.1-dind
+ - docker:19.03.8-dind
variables:
IMAGE_TAG: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
script:
@@ -451,10 +451,10 @@ build_image:
- master
delete_image:
- image: docker:19.03.1
+ image: docker:19.03.8
stage: clean
services:
- - docker:19.03.1-dind
+ - docker:19.03.8-dind
variables:
IMAGE_TAG: $CI_PROJECT_PATH:$CI_COMMIT_REF_SLUG
REG_SHA256: ade837fc5224acd8c34732bf54a94f579b47851cc6a7fd5899a98386b782e228
diff --git a/doc/user/profile/account/delete_account.md b/doc/user/profile/account/delete_account.md
index 97827963be0..c9193c6d94c 100644
--- a/doc/user/profile/account/delete_account.md
+++ b/doc/user/profile/account/delete_account.md
@@ -32,18 +32,22 @@ As an administrator, you can delete a user account by:
- **Delete user and contributions** to delete the user and
their associated records.
+DANGER: **Danger:** Using the **Delete user and contributions** option may result
+in removing more data than intended. Please see [associated records](#associated-records)
+below for additional details.
+
## Associated Records
-> - Introduced for issues in
-> [GitLab 9.0](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/7393).
-> - Introduced for merge requests, award emoji, notes, and abuse reports in
-> [GitLab 9.1](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/10467).
-> - Hard deletion from abuse reports and spam logs was introduced in
-> [GitLab 9.1](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/10273),
-> and from the API in
-> [GitLab 9.3](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/11853).
+> - Introduced for issues in [GitLab 9.0](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/7393).
+> - Introduced for merge requests, award emoji, notes, and abuse reports in [GitLab 9.1](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/10467).
+> - Hard deletion from abuse reports and spam logs was introduced in [GitLab 9.1](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/10273), and from the API in [GitLab 9.3](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/11853).
+
+There are two options for deleting users:
-When a user account is deleted, not all associated records are deleted with it.
+- **Delete user**
+- **Delete user and contributions**
+
+When using the **Delete user** option, not all associated records are deleted with the user.
Here's a list of things that will **not** be deleted:
- Issues that the user created.
@@ -57,6 +61,12 @@ user with the username "Ghost User", whose sole purpose is to act as a container
for such records. Any commits made by a deleted user will still display the
username of the original user.
+When using the **Delete user and contributions** option, **all** associated records
+are removed. This includes all of the items mentioned above, such as issues,
+merge requests, notes/comments, and more. Consider
+[blocking a user](../../admin_area/blocking_unblocking_users.md)
+or using the **Delete user** option instead.
+
When a user is deleted from an [abuse report](../../admin_area/abuse_reports.md)
or spam log, these associated
records are not ghosted and will be removed, along with any groups the user
diff --git a/doc/user/profile/personal_access_tokens.md b/doc/user/profile/personal_access_tokens.md
index 204230c4ca3..1223f7b801a 100644
--- a/doc/user/profile/personal_access_tokens.md
+++ b/doc/user/profile/personal_access_tokens.md
@@ -4,11 +4,11 @@ type: concepts, howto
# Personal access tokens
-> [Introduced][ce-3749] in GitLab 8.8.
+> [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/3749) in GitLab 8.8.
-If you're unable to use [OAuth2](../../api/oauth2.md), you can use a personal access token to authenticate with the [GitLab API][api].
+If you're unable to use [OAuth2](../../api/oauth2.md), you can use a personal access token to authenticate with the [GitLab API](../../api/README.md#personal-access-tokens).
-You can also use personal access tokens with Git to authenticate over HTTP or SSH. Personal access tokens are required when [Two-Factor Authentication (2FA)][2fa] is enabled. In both cases, you can authenticate with a token in place of your password.
+You can also use personal access tokens with Git to authenticate over HTTP or SSH. Personal access tokens are required when [Two-Factor Authentication (2FA)](../account/two_factor_authentication.md) is enabled. In both cases, you can authenticate with a token in place of your password.
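+
+For example, a minimal sketch of both uses, with a hypothetical `gitlab.example.com` host
+and `<your_access_token>` as a placeholder for the token:
+
+```shell
+# Call the REST API, passing the token in the PRIVATE-TOKEN header
+curl --header "PRIVATE-TOKEN: <your_access_token>" "https://gitlab.example.com/api/v4/projects"
+
+# Clone over HTTPS, supplying the token in place of your password
+git clone https://<username>:<your_access_token>@gitlab.example.com/group/project.git
+```
+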
Personal access tokens expire on the date you define, at midnight UTC.
@@ -41,21 +41,14 @@ the following table.
| Scope | Introduced in | Description |
| ------------------ | ------------- | ----------- |
-| `read_user` | [GitLab 8.15](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/5951) | Allows access to the read-only endpoints under `/users`. Essentially, any of the `GET` requests in the [Users API][users] are allowed. |
+| `read_user` | [GitLab 8.15](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/5951) | Allows access to the read-only endpoints under `/users`. Essentially, any of the `GET` requests in the [Users API](../../api/users.md) are allowed. |
| `api` | [GitLab 8.15](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/5951) | Grants complete read/write access to the API, including all groups and projects, the container registry, and the package registry. |
-| `read_api` | [GitLab 12.10](https://https://gitlab.com/gitlab-org/gitlab/-/merge_requests/28944) | Grants read access to the API, including all groups and projects, the container registry, and the package registry. |
-| `read_registry` | [GitLab 9.3](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/11845) | Allows to read (pull) [container registry] images if a project is private and authorization is required. |
+| `read_api` | [GitLab 12.10](https://gitlab.com/gitlab-org/gitlab/-/merge_requests/28944) | Grants read access to the API, including all groups and projects, the container registry, and the package registry. |
+| `read_registry`    | [GitLab 9.3](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/11845) | Allows read access (pull) to [container registry](../packages/container_registry/index.md) images if a project is private and authorization is required. |
| `sudo` | [GitLab 10.2](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/14838) | Allows performing API actions as any user in the system (if the authenticated user is an admin). |
| `read_repository` | [GitLab 10.7](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17894) | Allows read-only access (pull) to the repository through `git clone`. |
| `write_repository` | [GitLab 11.11](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/26021) | Allows read-write access (pull, push) to the repository through `git clone`. Required for accessing Git repositories over HTTP when 2FA is enabled. |
-[2fa]: ../account/two_factor_authentication.md
-[api]: ../../api/README.md
-[ce-3749]: https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/3749
-[container registry]: ../packages/container_registry/index.md
-[users]: ../../api/users.md
-[usage]: ../../api/README.md#personal-access-tokens
-
<!-- ## Troubleshooting
Include any troubleshooting steps that you can foresee. If you know beforehand what issues
diff --git a/doc/user/project/clusters/img/kubernetes_pod_logs_v12_10.png b/doc/user/project/clusters/img/kubernetes_pod_logs_v12_10.png
new file mode 100644
index 00000000000..abac22e3f1f
--- /dev/null
+++ b/doc/user/project/clusters/img/kubernetes_pod_logs_v12_10.png
Binary files differ
diff --git a/doc/user/project/clusters/img/kubernetes_pod_logs_v12_9.png b/doc/user/project/clusters/img/kubernetes_pod_logs_v12_9.png
deleted file mode 100644
index 02b7cad1e3f..00000000000
--- a/doc/user/project/clusters/img/kubernetes_pod_logs_v12_9.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/clusters/img/sidebar_menu_pod_logs_v12_10.png b/doc/user/project/clusters/img/sidebar_menu_pod_logs_v12_10.png
new file mode 100644
index 00000000000..ee37970d867
--- /dev/null
+++ b/doc/user/project/clusters/img/sidebar_menu_pod_logs_v12_10.png
Binary files differ
diff --git a/doc/user/project/clusters/img/sidebar_menu_pod_logs_v12_5.png b/doc/user/project/clusters/img/sidebar_menu_pod_logs_v12_5.png
deleted file mode 100644
index f113b0353f2..00000000000
--- a/doc/user/project/clusters/img/sidebar_menu_pod_logs_v12_5.png
+++ /dev/null
Binary files differ
diff --git a/doc/user/project/clusters/kubernetes_pod_logs.md b/doc/user/project/clusters/kubernetes_pod_logs.md
index e02c105e628..5543187b6de 100644
--- a/doc/user/project/clusters/kubernetes_pod_logs.md
+++ b/doc/user/project/clusters/kubernetes_pod_logs.md
@@ -14,7 +14,7 @@ Everything you need to build, test, deploy, and run your app at scale.
[Kubernetes](https://kubernetes.io) logs can be viewed directly within GitLab.
-![Pod logs](img/kubernetes_pod_logs_v12_9.png)
+![Pod logs](img/kubernetes_pod_logs_v12_10.png)
## Requirements
@@ -32,7 +32,7 @@ You can access them in two ways.
Go to **{cloud-gear}** **Operations > Logs** on the sidebar menu.
-![Sidebar menu](img/sidebar_menu_pod_logs_v12_5.png)
+![Sidebar menu](img/sidebar_menu_pod_logs_v12_10.png)
### From Deploy Boards
diff --git a/doc/user/project/deploy_tokens/img/deploy_tokens.png b/doc/user/project/deploy_tokens/img/deploy_tokens.png
index 493de8e0fce..afe1dfb922f 100644
--- a/doc/user/project/deploy_tokens/img/deploy_tokens.png
+++ b/doc/user/project/deploy_tokens/img/deploy_tokens.png
Binary files differ
diff --git a/doc/user/project/deploy_tokens/index.md b/doc/user/project/deploy_tokens/index.md
index 0bd511cf837..ebb12a6ed5d 100644
--- a/doc/user/project/deploy_tokens/index.md
+++ b/doc/user/project/deploy_tokens/index.md
@@ -2,8 +2,9 @@
> - [Introduced](https://gitlab.com/gitlab-org/gitlab-foss/-/merge_requests/17894) in GitLab 10.7.
> - [Moved](https://gitlab.com/gitlab-org/gitlab/issues/199370) from **Settings > Repository** in GitLab 12.9.
+> - [Added `write_registry` scope](https://gitlab.com/gitlab-org/gitlab/-/issues/22743) in GitLab 12.10.
-Deploy tokens allow you to download (`git clone`) or read the container registry images of a project without having a user and a password.
+Deploy tokens allow you to download (`git clone`) or push and pull the container registry images of a project without having a user and a password.
Deploy tokens can be managed by [maintainers only](../../permissions.md).
@@ -44,6 +45,7 @@ the following table.
| ----- | ----------- |
| `read_repository` | Allows read-access to the repository through `git clone` |
| `read_registry` | Allows read-access to [container registry](../../packages/container_registry/index.md) images if a project is private and authorization is required. |
+| `write_registry` | Allows write-access (push) to [container registry](../../packages/container_registry/index.md). |
## Deploy token custom username
@@ -83,6 +85,21 @@ docker login -u <username> -p <deploy_token> registry.example.com
Just replace `<username>` and `<deploy_token>` with the proper values. Then you can simply
pull images from your Container Registry.
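+
+For example, with a hypothetical image path:
+
+```shell
+docker pull registry.example.com/group/project/image:latest
+```
+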
+### Push Container Registry images
+
+To push the container registry images, you'll need to:
+
+1. Create a Deploy Token with `write_registry` as a scope.
+1. Take note of your `username` and `token`.
+1. Log in to GitLab’s Container Registry using the deploy token:
+
+ ```shell
+ docker login -u <username> -p <deploy_token> registry.example.com
+ ```
+
+Just replace `<username>` and `<deploy_token>` with the proper values. Then you can simply
+push images to your Container Registry.
+
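+For example, a short sketch using a hypothetical image path on the same registry:
+
+```shell
+# Build and tag the image against your project's registry path, then push it
+docker build -t registry.example.com/group/project/image:latest .
+docker push registry.example.com/group/project/image:latest
+```
+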
### Group Deploy Token
> [Introduced](https://gitlab.com/gitlab-org/gitlab/issues/21765) in GitLab 12.9.
@@ -107,7 +124,7 @@ There's a special case when it comes to Deploy Tokens. If a user creates one
named `gitlab-deploy-token`, the username and token of the Deploy Token will be
automatically exposed to the CI/CD jobs as environment variables: `CI_DEPLOY_USER` and
`CI_DEPLOY_PASSWORD`, respectively. With the GitLab Deploy Token, the
-`read_registry` scope is implied.
+`read_registry` and `write_registry` scopes are implied.
After you create the token, you can login to the Container Registry using
those variables:
diff --git a/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md b/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md
index 42b1570d213..f80b741fb77 100644
--- a/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md
+++ b/doc/user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md
@@ -60,11 +60,11 @@ associated Pages domain. It also will be renewed automatically by GitLab.
## Troubleshooting
-### Error "Something went wrong while obtaining Let's Encrypt certificate"
+### Error "Something went wrong while obtaining the Let's Encrypt certificate"
> [Introduced](https://gitlab.com/gitlab-org/gitlab/-/issues/30146) in GitLab 13.0.
-If you get an error **Something went wrong while obtaining Let's Encrypt certificate**, you can try obtaining the certificate again by following these steps:
+If you get an error **Something went wrong while obtaining the Let's Encrypt certificate**, you can try obtaining the certificate again by following these steps:
1. Go to your project's **Settings > Pages**.
1. Click **Edit** on your domain.
diff --git a/doc/user/project/repository/forking_workflow.md b/doc/user/project/repository/forking_workflow.md
index 927c1db804a..c26f2bd6b1d 100644
--- a/doc/user/project/repository/forking_workflow.md
+++ b/doc/user/project/repository/forking_workflow.md
@@ -54,6 +54,9 @@ When you are ready to send your code back to the upstream project,
[create a merge request](../merge_requests/creating_merge_requests.md). For **Source branch**,
choose your forked project's branch. For **Target branch**, choose the original project's branch.
+NOTE: **Note:**
+When creating a merge request, if the forked project's visibility is more restrictive than the parent project (for example, the fork is private and the parent is public), the target branch defaults to the forked project's default branch. This prevents potentially exposing the private code of the forked project.
+
![Selecting branches](img/forking_workflow_branch_select.png)
Then you can add labels, a milestone, and assign the merge request to someone who can review
diff --git a/lib/api/api.rb b/lib/api/api.rb
index eb7f47de9e2..de9a3120d90 100644
--- a/lib/api/api.rb
+++ b/lib/api/api.rb
@@ -152,6 +152,7 @@ module API
mount ::API::Members
mount ::API::MergeRequestDiffs
mount ::API::MergeRequests
+ mount ::API::Metrics::Dashboard::Annotations
mount ::API::Namespaces
mount ::API::Notes
mount ::API::Discussions
diff --git a/lib/api/deploy_tokens.rb b/lib/api/deploy_tokens.rb
index d36b75f5bfd..5de36c14d7b 100644
--- a/lib/api/deploy_tokens.rb
+++ b/lib/api/deploy_tokens.rb
@@ -10,6 +10,7 @@ module API
result_hash = {}
result_hash[:read_registry] = scopes.include?('read_registry')
+ result_hash[:write_registry] = scopes.include?('write_registry')
result_hash[:read_repository] = scopes.include?('read_repository')
result_hash
end
@@ -54,7 +55,7 @@ module API
params do
requires :name, type: String, desc: "New deploy token's name"
requires :scopes, type: Array[String], values: ::DeployToken::AVAILABLE_SCOPES.map(&:to_s),
- desc: 'Indicates the deploy token scopes. Must be at least one of "read_repository" or "read_registry".'
+ desc: 'Indicates the deploy token scopes. Must be at least one of "read_repository", "read_registry", or "write_registry".'
optional :expires_at, type: DateTime, desc: 'Expiration date for the deploy token. Does not expire if no value is provided.'
optional :username, type: String, desc: 'Username for deploy token. Default is `gitlab+deploy-token-{n}`'
end
@@ -117,7 +118,7 @@ module API
params do
requires :name, type: String, desc: 'The name of the deploy token'
requires :scopes, type: Array[String], values: ::DeployToken::AVAILABLE_SCOPES.map(&:to_s),
- desc: 'Indicates the deploy token scopes. Must be at least one of "read_repository" or "read_registry".'
+ desc: 'Indicates the deploy token scopes. Must be at least one of "read_repository", "read_registry", or "write_registry".'
optional :expires_at, type: DateTime, desc: 'Expiration date for the deploy token. Does not expire if no value is provided.'
optional :username, type: String, desc: 'Username for deploy token. Default is `gitlab+deploy-token-{n}`'
end
diff --git a/lib/api/entities/metrics/dashboard/annotation.rb b/lib/api/entities/metrics/dashboard/annotation.rb
new file mode 100644
index 00000000000..66bd09d84f9
--- /dev/null
+++ b/lib/api/entities/metrics/dashboard/annotation.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module API
+ module Entities
+ module Metrics
+ module Dashboard
+ class Annotation < Grape::Entity
+ expose :id
+ expose :starting_at
+ expose :ending_at
+ expose :dashboard_path
+ expose :description
+ expose :environment_id
+ expose :cluster_id
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/entities/project_import_status.rb b/lib/api/entities/project_import_status.rb
index 9b7a2bd1f3e..de7b4b998be 100644
--- a/lib/api/entities/project_import_status.rb
+++ b/lib/api/entities/project_import_status.rb
@@ -5,7 +5,7 @@ module API
class ProjectImportStatus < ProjectIdentity
expose :import_status
expose :correlation_id do |project, _options|
- project.import_state.correlation_id
+ project.import_state&.correlation_id
end
# TODO: Use `expose_nil` once we upgrade the grape-entity gem
diff --git a/lib/api/group_clusters.rb b/lib/api/group_clusters.rb
index 0108f6feae3..2c12c6387fb 100644
--- a/lib/api/group_clusters.rb
+++ b/lib/api/group_clusters.rb
@@ -53,6 +53,7 @@ module API
requires :name, type: String, desc: 'Cluster name'
optional :enabled, type: Boolean, default: true, desc: 'Determines if cluster is active or not, defaults to true'
optional :domain, type: String, desc: 'Cluster base domain'
+ optional :management_project_id, type: Integer, desc: 'The ID of the management project'
optional :managed, type: Boolean, default: true, desc: 'Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true'
requires :platform_kubernetes_attributes, type: Hash, desc: %q(Platform Kubernetes data) do
requires :api_url, type: String, allow_blank: false, desc: 'URL to access the Kubernetes API'
diff --git a/lib/api/metrics/dashboard/annotations.rb b/lib/api/metrics/dashboard/annotations.rb
new file mode 100644
index 00000000000..691abac863a
--- /dev/null
+++ b/lib/api/metrics/dashboard/annotations.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+module API
+ module Metrics
+ module Dashboard
+ class Annotations < Grape::API
+ desc 'Create a new monitoring dashboard annotation' do
+ success Entities::Metrics::Dashboard::Annotation
+ end
+
+ params do
+ requires :starting_at, type: DateTime,
+ desc: 'Date time indicating starting moment to which the annotation relates.'
+ optional :ending_at, type: DateTime,
+ desc: 'Date time indicating ending moment to which the annotation relates.'
+ requires :dashboard_path, type: String,
+ desc: 'The path to a file defining the dashboard on which the annotation should be added'
+ requires :description, type: String, desc: 'The description of the annotation'
+ end
+
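+      # Exposes POST :id/metrics_dashboard/annotations under the :environments resource.
+      # Responds with 404 unless the :metrics_dashboard_annotations feature flag is
+      # enabled for the environment's project, and with 403 unless the current user
+      # can create metrics dashboard annotations for the environment.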
+ resource :environments do
+ post ':id/metrics_dashboard/annotations' do
+ environment = ::Environment.find(params[:id])
+
+ not_found! unless Feature.enabled?(:metrics_dashboard_annotations, environment.project)
+
+ forbidden! unless can?(current_user, :create_metrics_dashboard_annotation, environment)
+
+ result = ::Metrics::Dashboard::Annotations::CreateService.new(current_user, declared(params).merge(environment: environment)).execute
+
+ if result[:status] == :success
+ present result[:annotation], with: Entities::Metrics::Dashboard::Annotation
+ else
+ error!(result, 400)
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/api/project_clusters.rb b/lib/api/project_clusters.rb
index b482980b88a..299301aabc4 100644
--- a/lib/api/project_clusters.rb
+++ b/lib/api/project_clusters.rb
@@ -56,6 +56,7 @@ module API
requires :name, type: String, desc: 'Cluster name'
optional :enabled, type: Boolean, default: true, desc: 'Determines if cluster is active or not, defaults to true'
optional :domain, type: String, desc: 'Cluster base domain'
+ optional :management_project_id, type: Integer, desc: 'The ID of the management project'
optional :managed, type: Boolean, default: true, desc: 'Determines if GitLab will manage namespaces and service accounts for this cluster, defaults to true'
requires :platform_kubernetes_attributes, type: Hash, desc: %q(Platform Kubernetes data) do
requires :api_url, type: String, allow_blank: false, desc: 'URL to access the Kubernetes API'
diff --git a/lib/gitlab/application_rate_limiter.rb b/lib/gitlab/application_rate_limiter.rb
index c1066d8fa62..2defbd26b98 100644
--- a/lib/gitlab/application_rate_limiter.rb
+++ b/lib/gitlab/application_rate_limiter.rb
@@ -19,8 +19,9 @@ module Gitlab
# and only do that when it's needed.
def rate_limits
{
- project_export: { threshold: 1, interval: 5.minutes },
- project_download_export: { threshold: 10, interval: 10.minutes },
+ issues_create: { threshold: -> { Gitlab::CurrentSettings.current_application_settings.issues_create_limit }, interval: 1.minute },
+ project_export: { threshold: 1, interval: 5.minutes },
+ project_download_export: { threshold: 10, interval: 10.minutes },
project_repositories_archive: { threshold: 5, interval: 1.minute },
project_generate_new_export: { threshold: 1, interval: 5.minutes },
project_import: { threshold: 30, interval: 5.minutes },
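Unlike the static entries around it, the issues_create threshold is a lambda, so the limit is read from Gitlab::CurrentSettings each time the limiter is consulted rather than once at load time. A hedged console sketch of checking the limit, assuming the limiter's existing throttled? interface that the other keys in this table already go through:

    # Hypothetical console sketch; Project.first / User.first are placeholder records.
    scope = [Project.first, User.first]

    Gitlab::ApplicationRateLimiter.throttled?(:issues_create, scope: scope)
    # => false until issues_create_limit requests land within one minute for this scope;
    #    the lambda re-reads the admin setting on every call, so changes apply immediately.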
diff --git a/lib/gitlab/auth.rb b/lib/gitlab/auth.rb
index c489c835d9d..8e14d21f591 100644
--- a/lib/gitlab/auth.rb
+++ b/lib/gitlab/auth.rb
@@ -12,7 +12,7 @@ module Gitlab
REPOSITORY_SCOPES = [:read_repository, :write_repository].freeze
# Scopes used for GitLab Docker Registry access
- REGISTRY_SCOPES = [:read_registry].freeze
+ REGISTRY_SCOPES = [:read_registry, :write_registry].freeze
# Scopes used for GitLab as admin
ADMIN_SCOPES = [:sudo].freeze
@@ -200,6 +200,7 @@ module Gitlab
api: full_authentication_abilities,
read_api: read_only_authentication_abilities,
read_registry: [:read_container_image],
+ write_registry: [:create_container_image],
read_repository: [:download_code],
write_repository: [:download_code, :push_code]
}
diff --git a/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments.rb b/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments.rb
new file mode 100644
index 00000000000..9778f360e87
--- /dev/null
+++ b/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module BackgroundMigration
+ # Backfill deployment_clusters for a range of deployments
+ class BackfillDeploymentClustersFromDeployments
+ def perform(start_id, end_id)
+ ActiveRecord::Base.connection.execute <<~SQL
+ INSERT INTO deployment_clusters (deployment_id, cluster_id)
+ SELECT deployments.id, deployments.cluster_id
+ FROM deployments
+ WHERE deployments.cluster_id IS NOT NULL
+ AND deployments.id BETWEEN #{start_id} AND #{end_id}
+ ON CONFLICT DO NOTHING
+ SQL
+ end
+ end
+ end
+end
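The worker copies (deployment_id, cluster_id) pairs for one ID range per invocation and relies on ON CONFLICT DO NOTHING to stay idempotent, so it is meant to be fanned out over the deployments table in batches from a post-deployment migration. A minimal sketch of running a single batch by hand; the ID range is a placeholder:

    # Hypothetical Rails console sketch; 1..10_000 is a placeholder ID range.
    Gitlab::BackgroundMigration::BackfillDeploymentClustersFromDeployments
      .new
      .perform(1, 10_000)
    # Re-running the same range is safe: ON CONFLICT DO NOTHING skips rows whose
    # deployment_id already exists in deployment_clusters.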
diff --git a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
index 10ef33e71d5..0e3d7660bdf 100644
--- a/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
+++ b/lib/gitlab/ci/templates/Security/DAST.gitlab-ci.yml
@@ -22,6 +22,7 @@ dast:
allow_failure: true
script:
- export DAST_WEBSITE=${DAST_WEBSITE:-$(cat environment_url.txt)}
+ - if [ -z "$DAST_WEBSITE$DAST_API_SPECIFICATION" ]; then echo "Either DAST_WEBSITE or DAST_API_SPECIFICATION must be set. See https://docs.gitlab.com/ee/user/application_security/dast/#configuration for more details." && exit 1; fi
- /analyze
artifacts:
reports:
diff --git a/lib/gitlab/database/batch_count.rb b/lib/gitlab/database/batch_count.rb
index 3eb0197d178..2359dceae48 100644
--- a/lib/gitlab/database/batch_count.rb
+++ b/lib/gitlab/database/batch_count.rb
@@ -37,6 +37,7 @@ module Gitlab
MIN_REQUIRED_BATCH_SIZE = 1_250
MAX_ALLOWED_LOOPS = 10_000
SLEEP_TIME_IN_SECONDS = 0.01 # 10 msec sleep
+ ALLOWED_MODES = [:itself, :distinct].freeze
# Each query should take < 500ms https://gitlab.com/gitlab-org/gitlab/-/merge_requests/22705
DEFAULT_DISTINCT_BATCH_SIZE = 10_000
@@ -55,8 +56,8 @@ module Gitlab
def count(batch_size: nil, mode: :itself, start: nil, finish: nil)
raise 'BatchCount can not be run inside a transaction' if ActiveRecord::Base.connection.transaction_open?
- raise "The mode #{mode.inspect} is not supported" unless [:itself, :distinct].include?(mode)
- raise 'Use distinct count for optimized distinct counting' if @relation.limit(1).distinct_value.present? && mode != :distinct
+
+ check_mode!(mode)
# non-distinct have better performance
batch_size ||= mode == :distinct ? DEFAULT_DISTINCT_BATCH_SIZE : DEFAULT_BATCH_SIZE
@@ -102,6 +103,12 @@ module Gitlab
def actual_finish(finish)
finish || @relation.maximum(@column) || 0
end
+
+ def check_mode!(mode)
+ raise "The mode #{mode.inspect} is not supported" unless ALLOWED_MODES.include?(mode)
+ raise 'Use distinct count for optimized distinct counting' if @relation.limit(1).distinct_value.present? && mode != :distinct
+ raise 'Use distinct count only with non id fields' if @column == :id && mode == :distinct
+ end
end
end
end
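The extracted check_mode! now also rejects a distinct count over :id, since a distinct count of a primary key is just the plain count and does not need the more expensive distinct batching. A hedged sketch of the calling conventions this guards, assuming the module's batch_count/batch_distinct_count wrappers delegate to #count as before:

    # Hypothetical console sketch; Issue is a placeholder model.
    Gitlab::Database::BatchCount.batch_count(Issue)                       # mode :itself
    Gitlab::Database::BatchCount.batch_distinct_count(Issue, :author_id)  # mode :distinct

    # Both of the following now raise instead of running a wasteful query:
    # Gitlab::Database::BatchCount.batch_distinct_count(Issue, :id)  # distinct over :id
    # Gitlab::Database::BatchCount.batch_count(Issue.distinct)       # distinct relation without :distinct mode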
diff --git a/lib/gitlab/database/migration_helpers.rb b/lib/gitlab/database/migration_helpers.rb
index dc4de9b1906..3922f5c6683 100644
--- a/lib/gitlab/database/migration_helpers.rb
+++ b/lib/gitlab/database/migration_helpers.rb
@@ -1178,8 +1178,147 @@ into similar problems in the future (e.g. when new tables are created).
end
end
+ # Returns the name for a check constraint
+ #
+ # type:
+ # - Any value, as long as it is unique
+ # - Constraint names are unique per table in Postgres, and, additionally,
+ # we can have multiple check constraints over a column
+ # So we use the (table, column, type) triplet as a unique name
+ # - e.g. we use 'max_length' when adding checks for text limits
+ # or 'not_null' when adding a NOT NULL constraint
+ #
+ def check_constraint_name(table, column, type)
+ identifier = "#{table}_#{column}_check_#{type}"
+ # Check concurrent_foreign_key_name() for info on why we use a hash
+ hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
+
+ "check_#{hashed_identifier}"
+ end
+
+ def check_constraint_exists?(table, constraint_name)
+ # Constraint names are unique per table in Postgres, not per schema
+ # Two tables can have constraints with the same name, so we filter by
+ # the table name in addition to using the constraint_name
+ check_sql = <<~SQL
+ SELECT COUNT(*)
+ FROM pg_constraint
+ JOIN pg_class ON pg_constraint.conrelid = pg_class.oid
+ WHERE pg_constraint.contype = 'c'
+ AND pg_constraint.conname = '#{constraint_name}'
+ AND pg_class.relname = '#{table}'
+ SQL
+
+ connection.select_value(check_sql).positive?
+ end
+
+ # Adds a check constraint to a table
+ #
+ # This method is the generic helper for adding any check constraint
+ # More specialized helpers may use it (e.g. add_text_limit or add_not_null)
+ #
+ # This method only requires minimal locking:
+ # - The constraint is added using NOT VALID
+ # This allows us to add the check constraint without validating it
+ # - The check will be enforced for new data (inserts) coming in
+ # - If `validate: true` the constraint is also validated
+ # Otherwise, validate_check_constraint() can be used at a later stage
+ # - Check comments on add_concurrent_foreign_key for more info
+ #
+ # table - The table the constraint will be added to
+ # check - The check clause to add
+ # e.g. 'char_length(name) <= 5' or 'store IS NOT NULL'
+ # constraint_name - The name of the check constraint (otherwise auto-generated)
+ # Should be unique per table (not per column)
+ # validate - Whether to validate the constraint in this call
+ #
+ # rubocop:disable Gitlab/RailsLogger
+ def add_check_constraint(table, check, constraint_name, validate: true)
+ # Transactions would result in ALTER TABLE locks being held for the
+ # duration of the transaction, defeating the purpose of this method.
+ if transaction_open?
+ raise 'add_check_constraint can not be run inside a transaction'
+ end
+
+ if check_constraint_exists?(table, constraint_name)
+ warning_message = <<~MESSAGE
+ Check constraint was not created because it exists already
+ (this may be due to an aborted migration or similar)
+ table: #{table}, check: #{check}, constraint name: #{constraint_name}
+ MESSAGE
+
+ Rails.logger.warn warning_message
+ else
+ # Only add the constraint without validating it
+ # Even though it is fast, ADD CONSTRAINT requires an EXCLUSIVE lock
+ # Use with_lock_retries to make sure that this operation
+ # will not timeout on tables accessed by many processes
+ with_lock_retries do
+ execute <<-EOF.strip_heredoc
+ ALTER TABLE #{table}
+ ADD CONSTRAINT #{constraint_name}
+ CHECK ( #{check} )
+ NOT VALID;
+ EOF
+ end
+ end
+
+ if validate
+ validate_check_constraint(table, constraint_name)
+ end
+ end
+
+ def validate_check_constraint(table, constraint_name)
+ unless check_constraint_exists?(table, constraint_name)
+ raise missing_schema_object_message(table, "check constraint", constraint_name)
+ end
+
+ disable_statement_timeout do
+ # VALIDATE CONSTRAINT only requires a SHARE UPDATE EXCLUSIVE LOCK
+ # It only conflicts with other validations and creating indexes
+ execute("ALTER TABLE #{table} VALIDATE CONSTRAINT #{constraint_name};")
+ end
+ end
+
+ def remove_check_constraint(table, constraint_name)
+ # DROP CONSTRAINT requires an EXCLUSIVE lock
+ # Use with_lock_retries to make sure that this will not timeout
+ with_lock_retries do
+ execute <<-EOF.strip_heredoc
+ ALTER TABLE #{table}
+ DROP CONSTRAINT IF EXISTS #{constraint_name}
+ EOF
+ end
+ end
+
+ # Migration Helpers for adding limit to text columns
+ def add_text_limit(table, column, limit, constraint_name: nil, validate: true)
+ add_check_constraint(
+ table,
+ "char_length(#{column}) <= #{limit}",
+ text_limit_name(table, column, name: constraint_name),
+ validate: validate
+ )
+ end
+
+ def validate_text_limit(table, column, constraint_name: nil)
+ validate_check_constraint(table, text_limit_name(table, column, name: constraint_name))
+ end
+
+ def remove_text_limit(table, column, constraint_name: nil)
+ remove_check_constraint(table, text_limit_name(table, column, name: constraint_name))
+ end
+
+ def check_text_limit_exists?(table, column, constraint_name: nil)
+ check_constraint_exists?(table, text_limit_name(table, column, name: constraint_name))
+ end
+
private
+ def text_limit_name(table, column, name: nil)
+ name.presence || check_constraint_name(table, column, 'max_length')
+ end
+
def missing_schema_object_message(table, type, name)
<<~MESSAGE
Could not find #{type} "#{name}" on table "#{table}" which was referenced during the migration.
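add_text_limit is a thin wrapper over add_check_constraint with a char_length clause and an auto-generated check_<hash> name, so a typical caller is a regular migration that disables the DDL transaction and lets the helper add the NOT VALID constraint and then validate it. A hedged sketch; the table and column names are placeholders:

    # Hypothetical migration sketch; 'releases'/'description' are placeholder names.
    class AddDescriptionLimitToReleases < ActiveRecord::Migration[6.0]
      include Gitlab::Database::MigrationHelpers

      disable_ddl_transaction!  # add_check_constraint refuses to run inside a transaction

      def up
        # Adds CHECK (char_length(description) <= 1000) NOT VALID, then validates it.
        add_text_limit :releases, :description, 1000
      end

      def down
        remove_text_limit :releases, :description
      end
    end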
diff --git a/lib/gitlab/error_tracking.rb b/lib/gitlab/error_tracking.rb
index d20324a613e..a6e49825fd0 100644
--- a/lib/gitlab/error_tracking.rb
+++ b/lib/gitlab/error_tracking.rb
@@ -2,6 +2,21 @@
module Gitlab
module ErrorTracking
+ # Exceptions in this group will receive custom Sentry fingerprinting
+ CUSTOM_FINGERPRINTING = %w[
+ Acme::Client::Error::BadNonce
+ Acme::Client::Error::NotFound
+ Acme::Client::Error::RateLimited
+ Acme::Client::Error::Timeout
+ Acme::Client::Error::UnsupportedOperation
+ ActiveRecord::ConnectionTimeoutError
+ ActiveRecord::QueryCanceled
+ Gitlab::RequestContext::RequestDeadlineExceeded
+ GRPC::DeadlineExceeded
+ JIRA::HTTPError
+ Rack::Timeout::RequestTimeoutException
+ ].freeze
+
class << self
def configure
Raven.configure do |config|
@@ -14,8 +29,7 @@ module Gitlab
# Sanitize authentication headers
config.sanitize_http_headers = %w[Authorization Private-Token]
config.tags = { program: Gitlab.process_name }
- # Debugging for https://gitlab.com/gitlab-org/gitlab-foss/issues/57727
- config.before_send = method(:add_context_from_exception_type)
+ config.before_send = method(:before_send)
end
end
@@ -92,6 +106,13 @@ module Gitlab
private
+ def before_send(event, hint)
+ event = add_context_from_exception_type(event, hint)
+ event = custom_fingerprinting(event, hint)
+
+ event
+ end
+
def process_exception(exception, sentry: false, logging: true, extra:)
exception.try(:sentry_extra_data)&.tap do |data|
extra = extra.merge(data) if data.is_a?(Hash)
@@ -142,6 +163,7 @@ module Gitlab
}
end
+ # Debugging for https://gitlab.com/gitlab-org/gitlab-foss/issues/57727
def add_context_from_exception_type(event, hint)
if ActiveModel::MissingAttributeError === hint[:exception]
columns_hash = ActiveRecord::Base
@@ -156,6 +178,18 @@ module Gitlab
event
end
+
+ # Group common, mostly non-actionable exceptions by type and message,
+ # rather than cause
+ def custom_fingerprinting(event, hint)
+ ex = hint[:exception]
+
+ return event unless CUSTOM_FINGERPRINTING.include?(ex.class.name)
+
+ event.fingerprint = ['{{ default }}', ex.class.name, ex.message]
+
+ event
+ end
end
end
end
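before_send now chains the existing debugging-context step with the new fingerprinting step, so every exception class listed in CUSTOM_FINGERPRINTING is grouped in Sentry by class and message instead of by the default backtrace-derived fingerprint. A standalone plain-Ruby illustration of that rule; this mirrors custom_fingerprinting above and is not GitLab code:

    # Plain-Ruby sketch; Event is a stand-in for a Raven event object.
    Event = Struct.new(:fingerprint)

    def custom_fingerprint(event, exception, custom_classes)
      return event unless custom_classes.include?(exception.class.name)

      event.fingerprint = ['{{ default }}', exception.class.name, exception.message]
      event
    end

    event = custom_fingerprint(Event.new, ArgumentError.new('bad value'), %w[ArgumentError])
    event.fingerprint
    # => ["{{ default }}", "ArgumentError", "bad value"]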
diff --git a/lib/gitlab/jira_import/labels_importer.rb b/lib/gitlab/jira_import/labels_importer.rb
index 142a2da5be9..35c434e48a4 100644
--- a/lib/gitlab/jira_import/labels_importer.rb
+++ b/lib/gitlab/jira_import/labels_importer.rb
@@ -11,28 +11,19 @@ module Gitlab
end
def execute
- create_import_label(project)
+ cache_import_label(project)
import_jira_labels
end
private
- def create_import_label(project)
- label = Labels::CreateService.new(build_label_attrs(project)).execute(project: project)
- raise Projects::ImportService::Error, _('Failed to create import label for jira import.') unless label
+ def cache_import_label(project)
+ label = project.jira_imports.by_jira_project_key(jira_project_key).last.label
+ raise Projects::ImportService::Error, _('Failed to find import label for jira import.') unless label
JiraImport.cache_import_label_id(project.id, label.id)
end
- def build_label_attrs(project)
- import_start_time = project&.import_state&.last_update_started_at || Time.now
- title = "jira-import-#{import_start_time.strftime('%Y-%m-%d-%H-%M-%S')}"
- description = "Label for issues that were imported from jira on #{import_start_time.strftime('%Y-%m-%d %H:%M:%S')}"
- color = "#{Label.color_for(title)}"
-
- { title: title, description: description, color: color }
- end
-
def import_jira_labels
# todo: import jira labels, see https://gitlab.com/gitlab-org/gitlab/-/issues/212651
job_waiter
diff --git a/lib/gitlab/legacy_github_import/client.rb b/lib/gitlab/legacy_github_import/client.rb
index 34634d20a16..f7eaafeb446 100644
--- a/lib/gitlab/legacy_github_import/client.rb
+++ b/lib/gitlab/legacy_github_import/client.rb
@@ -6,13 +6,14 @@ module Gitlab
GITHUB_SAFE_REMAINING_REQUESTS = 100
GITHUB_SAFE_SLEEP_TIME = 500
- attr_reader :access_token, :host, :api_version
+ attr_reader :access_token, :host, :api_version, :wait_for_rate_limit_reset
- def initialize(access_token, host: nil, api_version: 'v3')
+ def initialize(access_token, host: nil, api_version: 'v3', wait_for_rate_limit_reset: true)
@access_token = access_token
@host = host.to_s.sub(%r{/+\z}, '')
@api_version = api_version
@users = {}
+ @wait_for_rate_limit_reset = wait_for_rate_limit_reset
if access_token
::Octokit.auto_paginate = false
@@ -120,7 +121,7 @@ module Gitlab
end
def request(method, *args, &block)
- sleep rate_limit_sleep_time if rate_limit_exceed?
+ sleep rate_limit_sleep_time if wait_for_rate_limit_reset && rate_limit_exceed?
data = api.__send__(method, *args) # rubocop:disable GitlabSecurity/PublicSend
return data unless data.is_a?(Array)
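With wait_for_rate_limit_reset set to false the client no longer sleeps until GitHub's rate-limit window resets, so importers can fail fast and surface the reset time to the user instead (see the new "GitHub API rate limit exceeded. Try again after %{reset_time}" string in the locale changes below). A hedged construction sketch; the token is a placeholder:

    # Hypothetical sketch; 'TOKEN' is a placeholder personal access token.
    client = Gitlab::LegacyGithubImport::Client.new('TOKEN', wait_for_rate_limit_reset: false)
    # #request now skips the sleep-on-rate-limit branch; callers handle the limit themselves.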
diff --git a/lib/gitlab/metrics/dashboard/stages/alerts_inserter.rb b/lib/gitlab/metrics/dashboard/stages/alerts_inserter.rb
new file mode 100644
index 00000000000..38736158c3b
--- /dev/null
+++ b/lib/gitlab/metrics/dashboard/stages/alerts_inserter.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'set'
+
+module Gitlab
+ module Metrics
+ module Dashboard
+ module Stages
+ class AlertsInserter < BaseStage
+ include ::Gitlab::Utils::StrongMemoize
+
+ def transform!
+ return if metrics_with_alerts.empty?
+
+ for_metrics do |metric|
+ next unless metrics_with_alerts.include?(metric[:metric_id])
+
+ metric[:alert_path] = alert_path(metric[:metric_id], project, params[:environment])
+ end
+ end
+
+ private
+
+ def metrics_with_alerts
+ strong_memoize(:metrics_with_alerts) do
+ alerts = ::Projects::Prometheus::AlertsFinder
+ .new(project: project, environment: params[:environment])
+ .execute
+
+ Set.new(alerts.map(&:prometheus_metric_id))
+ end
+ end
+
+ def alert_path(metric_id, project, environment)
+ ::Gitlab::Routing.url_helpers.project_prometheus_alert_path(project, metric_id, environment_id: environment.id, format: :json)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/lib/gitlab/path_regex.rb b/lib/gitlab/path_regex.rb
index df48e347511..706c16f6149 100644
--- a/lib/gitlab/path_regex.rb
+++ b/lib/gitlab/path_regex.rb
@@ -57,6 +57,10 @@ module Gitlab
v2
].freeze
+ # NOTE: Do not add new items to this list unless necessary as this will
+ # cause conflicts with existing namespaced routes for groups or projects.
+ # See https://docs.gitlab.com/ee/development/routing.html#project-routes
+ #
# This list should contain all words following `/*namespace_id/:project_id` in
# routes that contain a second wildcard.
#
@@ -98,12 +102,15 @@ module Gitlab
preview
raw
refs
- sse
tree
update
wikis
].freeze
+ # NOTE: Do not add new items to this list unless necessary as this will
+ # cause conflicts with existing namespaced routes for groups or projects.
+ # See https://docs.gitlab.com/ee/development/routing.html#group-routes
+ #
# These are all the paths that follow `/groups/*id/ or `/groups/*group_id`
# We need to reject these because we have a `/groups/*id` page that is the same
# as the `/*id`.
diff --git a/lib/gitlab/set_cache.rb b/lib/gitlab/set_cache.rb
index d1151a431bb..e891b805879 100644
--- a/lib/gitlab/set_cache.rb
+++ b/lib/gitlab/set_cache.rb
@@ -64,7 +64,9 @@ module Gitlab
else
redis.del(*keys)
end
- rescue ::Redis::CommandError
+ rescue ::Redis::CommandError => e
+ Gitlab::ErrorTracking.log_exception(e)
+
redis.del(*keys)
end
end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs.rb
deleted file mode 100644
index 7a77a56d642..00000000000
--- a/lib/gitlab/sidekiq_middleware/duplicate_jobs.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'digest'
-
-module Gitlab
- module SidekiqMiddleware
- module DuplicateJobs
- DROPPABLE_QUEUES = Set.new([
- Namespaces::RootStatisticsWorker.queue,
- Namespaces::ScheduleAggregationWorker.queue
- ]).freeze
-
- def self.drop_duplicates?(queue_name)
- Feature.enabled?(:drop_duplicate_sidekiq_jobs) ||
- drop_duplicates_for_queue?(queue_name)
- end
-
- private_class_method def self.drop_duplicates_for_queue?(queue_name)
- DROPPABLE_QUEUES.include?(queue_name) &&
- Feature.enabled?(:drop_duplicate_sidekiq_jobs_for_queue)
- end
- end
- end
-end
diff --git a/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb b/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb
index a9007039334..79bbb99752e 100644
--- a/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb
+++ b/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job.rb
@@ -67,7 +67,7 @@ module Gitlab
end
def droppable?
- idempotent? && duplicate? && DuplicateJobs.drop_duplicates?(queue_name)
+ idempotent? && duplicate?
end
private
diff --git a/lib/gitlab/static_site_editor/config.rb b/lib/gitlab/static_site_editor/config.rb
new file mode 100644
index 00000000000..4bc0fc95abd
--- /dev/null
+++ b/lib/gitlab/static_site_editor/config.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Gitlab
+ module StaticSiteEditor
+ class Config
+ def initialize(repository, ref, file_path, return_url)
+ @repository = repository
+ @ref = ref
+ @file_path = file_path
+ @return_url = return_url
+ end
+
+ def payload
+ {
+ branch: ref,
+ path: file_path,
+ commit: commit.id,
+ project_id: project.id,
+ project: project.path,
+ namespace: project.namespace.path,
+ return_url: return_url
+ }
+ end
+
+ private
+
+ attr_reader :repository, :ref, :file_path, :return_url
+
+ delegate :project, to: :repository
+
+ def commit
+ repository.commit(ref)
+ end
+ end
+ end
+end
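The new Config object simply bundles repository, ref, path, and return URL into the payload the Static Site Editor frontend expects. A hedged usage sketch with placeholder arguments:

    # Hypothetical Rails console sketch; all arguments are placeholders.
    config = Gitlab::StaticSiteEditor::Config.new(
      Project.first.repository,
      'master',
      'source/index.html.md',
      'https://example.com/after-edit'
    )

    config.payload
    # => { branch: 'master', path: 'source/index.html.md', commit: '<sha>',
    #      project_id: 1, project: 'my-project', namespace: 'my-group',
    #      return_url: 'https://example.com/after-edit' }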
diff --git a/locale/gitlab.pot b/locale/gitlab.pot
index 52446d172f5..58d2b4b9586 100644
--- a/locale/gitlab.pot
+++ b/locale/gitlab.pot
@@ -888,9 +888,15 @@ msgstr ""
msgid "A subscription will trigger a new pipeline on the default branch of this project when a pipeline successfully completes for a new tag on the %{default_branch_docs} of the subscribed project."
msgstr ""
+msgid "A terraform report was generated in your pipelines."
+msgstr ""
+
msgid "A user with write access to the source branch selected this option"
msgstr ""
+msgid "ACTION REQUIRED: Something went wrong while obtaining the Let's Encrypt certificate for GitLab Pages domain '%{domain}'"
+msgstr ""
+
msgid "API Help"
msgstr ""
@@ -2008,6 +2014,9 @@ msgstr ""
msgid "An error occurred while loading merge requests."
msgstr ""
+msgid "An error occurred while loading terraform report"
+msgstr ""
+
msgid "An error occurred while loading the data. Please try again."
msgstr ""
@@ -3412,6 +3421,9 @@ msgstr ""
msgid "Cannot have multiple Jira imports running at the same time"
msgstr ""
+msgid "Cannot import because issues are not available in this project."
+msgstr ""
+
msgid "Cannot make epic confidential if it contains not-confidential issues"
msgstr ""
@@ -3526,6 +3538,9 @@ msgstr ""
msgid "Changes are shown as if the <b>source</b> revision was being merged into the <b>target</b> revision."
msgstr ""
+msgid "Changes are unknown"
+msgstr ""
+
msgid "Changes suppressed. Click to show."
msgstr ""
@@ -5322,6 +5337,9 @@ msgstr ""
msgid "Configure existing installation"
msgstr ""
+msgid "Configure limit for issues created per minute by web and API requests."
+msgstr ""
+
msgid "Configure limits for web and API requests."
msgstr ""
@@ -6826,6 +6844,9 @@ msgstr ""
msgid "DeployTokens|Allows read-only access to the repository"
msgstr ""
+msgid "DeployTokens|Allows write access to the registry images"
+msgstr ""
+
msgid "DeployTokens|Copy deploy token"
msgstr ""
@@ -6844,7 +6865,7 @@ msgstr ""
msgid "DeployTokens|Deploy Tokens"
msgstr ""
-msgid "DeployTokens|Deploy tokens allow read-only access to your repository and registry images."
+msgid "DeployTokens|Deploy tokens allow access to your repository and registry images."
msgstr ""
msgid "DeployTokens|Expires"
@@ -7860,9 +7881,6 @@ msgstr ""
msgid "EnvironmentsDashboard|This dashboard displays a maximum of 7 projects and 3 environments per project. %{readMoreLink}"
msgstr ""
-msgid "Environments|All pods"
-msgstr ""
-
msgid "Environments|An error occurred while canceling the auto stop, please try again"
msgstr ""
@@ -7929,9 +7947,6 @@ msgstr ""
msgid "Environments|Environments are places where code gets deployed, such as staging or production."
msgstr ""
-msgid "Environments|Filter by pod"
-msgstr ""
-
msgid "Environments|Install Elastic Stack on your cluster to enable advanced querying capabilities such as full text search."
msgstr ""
@@ -7971,6 +7986,9 @@ msgstr ""
msgid "Environments|Open live environment"
msgstr ""
+msgid "Environments|Pod name"
+msgstr ""
+
msgid "Environments|Re-deploy"
msgstr ""
@@ -7998,9 +8016,6 @@ msgstr ""
msgid "Environments|Rollback environment %{name}?"
msgstr ""
-msgid "Environments|Search"
-msgstr ""
-
msgid "Environments|Select environment"
msgstr ""
@@ -8655,6 +8670,9 @@ msgstr ""
msgid "Failed to enqueue the rebase operation, possibly due to a long-lived transaction. Try again later."
msgstr ""
+msgid "Failed to find import label for jira import."
+msgstr ""
+
msgid "Failed to get ref."
msgstr ""
@@ -9708,6 +9726,9 @@ msgstr ""
msgid "Git version"
msgstr ""
+msgid "GitHub API rate limit exceeded. Try again after %{reset_time}"
+msgstr ""
+
msgid "GitHub import"
msgstr ""
@@ -9780,6 +9801,9 @@ msgstr ""
msgid "GitLab.com import"
msgstr ""
+msgid "GitLabPagesDomains|Retry"
+msgstr ""
+
msgid "GitLabPages|%{domain} is not verified. To learn how to verify ownership, visit your %{link_start}domain details%{link_end}."
msgstr ""
@@ -9846,7 +9870,7 @@ msgstr ""
msgid "GitLabPages|Save"
msgstr ""
-msgid "GitLabPages|Something went wrong while obtaining Let's Encrypt certificate for %{domain}. To retry visit your %{link_start}domain details%{link_end}."
+msgid "GitLabPages|Something went wrong while obtaining the Let's Encrypt certificate for %{domain}. To retry visit your %{link_start}domain details%{link_end}."
msgstr ""
msgid "GitLabPages|Support for domains and certificates is disabled. Ask your system's administrator to enable it."
@@ -11376,6 +11400,9 @@ msgstr ""
msgid "Issues Analytics"
msgstr ""
+msgid "Issues Rate Limits"
+msgstr ""
+
msgid "Issues can be bugs, tasks or ideas to be discussed. Also, issues are searchable and filterable."
msgstr ""
@@ -12061,6 +12088,9 @@ msgstr ""
msgid "Licenses|Detected in Project"
msgstr ""
+msgid "Licenses|Detected licenses that are out-of-compliance with the project's assigned policies"
+msgstr ""
+
msgid "Licenses|Displays licenses detected in the project, based on the %{linkStart}latest successful%{linkEnd} scan"
msgstr ""
@@ -12082,6 +12112,9 @@ msgstr ""
msgid "Licenses|Policy"
msgstr ""
+msgid "Licenses|Policy violation: denied"
+msgstr ""
+
msgid "Licenses|Specified policies in this project"
msgstr ""
@@ -12135,6 +12168,9 @@ msgstr ""
msgid "Link title"
msgstr ""
+msgid "Link title is required"
+msgstr ""
+
msgid "Linked emails (%{email_count})"
msgstr ""
@@ -12183,9 +12219,6 @@ msgstr ""
msgid "Loading"
msgstr ""
-msgid "Loading blob"
-msgstr ""
-
msgid "Loading contribution stats for group members"
msgstr ""
@@ -12923,7 +12956,7 @@ msgstr ""
msgid "Metrics|Prometheus Query Documentation"
msgstr ""
-msgid "Metrics|Reload this page"
+msgid "Metrics|Refresh dashboard"
msgstr ""
msgid "Metrics|Show last"
@@ -12935,9 +12968,15 @@ msgstr ""
msgid "Metrics|There was an error creating the dashboard. %{error}"
msgstr ""
+msgid "Metrics|There was an error fetching annotations. Please try again."
+msgstr ""
+
msgid "Metrics|There was an error fetching the environments data, please try again"
msgstr ""
+msgid "Metrics|There was an error getting annotations information."
+msgstr ""
+
msgid "Metrics|There was an error getting deployment information."
msgstr ""
@@ -14894,6 +14933,12 @@ msgstr ""
msgid "Please fill in a descriptive name for your group."
msgstr ""
+msgid "Please follow the %{link_start}Let's Encrypt troubleshooting instructions%{link_end} to re-obtain your Let's Encrypt certificate."
+msgstr ""
+
+msgid "Please follow the Let's Encrypt troubleshooting instructions to re-obtain your Let's Encrypt certificate: %{docs_url}."
+msgstr ""
+
msgid "Please migrate all existing projects to hashed storage to avoid security issues and ensure data integrity. %{migrate_link}"
msgstr ""
@@ -14969,7 +15014,7 @@ msgstr ""
msgid "Pods in use"
msgstr ""
-msgid "Point to any links you like: documentation, built binaries, or other related materials. These can be internal or external links from your GitLab instance."
+msgid "Point to any links you like: documentation, built binaries, or other related materials. These can be internal or external links from your GitLab instance. Duplicate URLs are not allowed."
msgstr ""
msgid "Preferences"
@@ -15632,6 +15677,12 @@ msgstr ""
msgid "Project name"
msgstr ""
+msgid "Project name suffix"
+msgstr ""
+
+msgid "Project name suffix is a user-defined string which will be appended to the project path, and will form the Service Desk email address."
+msgstr ""
+
msgid "Project order will not be saved as local storage is not available."
msgstr ""
@@ -17019,6 +17070,9 @@ msgstr ""
msgid "Reported %{timeAgo} by %{reportedBy}"
msgstr ""
+msgid "Reported Resource Changes: %{addNum} to add, %{changeNum} to change, %{deleteNum} to delete"
+msgstr ""
+
msgid "Reporter"
msgstr ""
@@ -17163,12 +17217,30 @@ msgstr ""
msgid "Requirement"
msgstr ""
+msgid "Requirement %{reference} has been added"
+msgstr ""
+
+msgid "Requirement %{reference} has been archived"
+msgstr ""
+
+msgid "Requirement %{reference} has been reopened"
+msgstr ""
+
+msgid "Requirement %{reference} has been updated"
+msgstr ""
+
msgid "Requirement title cannot have more than %{limit} characters."
msgstr ""
msgid "Requirements"
msgstr ""
+msgid "Requirements allow you to create criteria to check your products against."
+msgstr ""
+
+msgid "Requirements can be based on users, stakeholders, system, software or anything else you find important to capture."
+msgstr ""
+
msgid "Requires approval from %{names}."
msgid_plural "Requires %{count} more approvals from %{names}."
msgstr[0] ""
@@ -17941,6 +18013,9 @@ msgstr ""
msgid "SecurityDashboard|More information"
msgstr ""
+msgid "SecurityDashboard|No vulnerabilities found for dashboard"
+msgstr ""
+
msgid "SecurityDashboard|Pipeline %{pipelineLink} triggered %{timeago} by %{user}"
msgstr ""
@@ -18915,6 +18990,9 @@ msgstr ""
msgid "Something went wrong while moving issues."
msgstr ""
+msgid "Something went wrong while obtaining the Let's Encrypt certificate."
+msgstr ""
+
msgid "Something went wrong while performing the action."
msgstr ""
@@ -19344,10 +19422,13 @@ msgstr ""
msgid "Static Application Security Testing (SAST)"
msgstr ""
-msgid "StaticSiteEditor|A merge request was created:"
+msgid "StaticSiteEditor|Branch could not be created."
+msgstr ""
+
+msgid "StaticSiteEditor|Could not commit the content changes."
msgstr ""
-msgid "StaticSiteEditor|A new branch was created:"
+msgid "StaticSiteEditor|Could not create merge request."
msgstr ""
msgid "StaticSiteEditor|Return to site"
@@ -19359,13 +19440,22 @@ msgstr ""
msgid "StaticSiteEditor|Summary of changes"
msgstr ""
+msgid "StaticSiteEditor|Update %{sourcePath} file"
+msgstr ""
+
msgid "StaticSiteEditor|View merge request"
msgstr ""
-msgid "StaticSiteEditor|Your changes have been submitted and a merge request has been created. The changes won’t be visible on the site until the merge request has been accepted."
+msgid "StaticSiteEditor|You added a commit:"
+msgstr ""
+
+msgid "StaticSiteEditor|You created a merge request:"
msgstr ""
-msgid "StaticSiteEditor|Your changes were committed to it:"
+msgid "StaticSiteEditor|You created a new branch:"
+msgstr ""
+
+msgid "StaticSiteEditor|Your changes have been submitted and a merge request has been created. The changes won’t be visible on the site until the merge request has been accepted."
msgstr ""
msgid "Statistics"
@@ -20637,6 +20727,9 @@ msgstr ""
msgid "This Project is currently archived and read-only. Please unarchive the project first if you want to resume Pull mirroring"
msgstr ""
+msgid "This URL is already used for another link; duplicate URLs are not allowed"
+msgstr ""
+
msgid "This action can lead to data loss. To prevent accidental actions we ask you to confirm your intention."
msgstr ""
@@ -21733,9 +21826,15 @@ msgstr ""
msgid "URL"
msgstr ""
+msgid "URL is required"
+msgstr ""
+
msgid "URL must be a valid url (ex: https://gitlab.com)"
msgstr ""
+msgid "URL must start with %{codeStart}http://%{codeEnd}, %{codeStart}https://%{codeEnd}, or %{codeStart}ftp://%{codeEnd}"
+msgstr ""
+
msgid "URL of the external storage that will serve the repository static objects (e.g. archives, blobs, ...)."
msgstr ""
@@ -21778,6 +21877,9 @@ msgstr ""
msgid "Unable to generate new instance ID"
msgstr ""
+msgid "Unable to load the diff"
+msgstr ""
+
msgid "Unable to load the diff. %{button_try_again}"
msgstr ""
@@ -22691,6 +22793,9 @@ msgstr ""
msgid "View full dashboard"
msgstr ""
+msgid "View full log"
+msgstr ""
+
msgid "View group labels"
msgstr ""
@@ -23521,9 +23626,6 @@ msgstr ""
msgid "You could not create a new trigger."
msgstr ""
-msgid "You didn't renew your %{strong}%{plan_name}%{strong_close} subscription for %{strong}%{namespace_name}%{strong_close} so it was downgraded to the free plan."
-msgstr ""
-
msgid "You didn't renew your %{strong}%{plan_name}%{strong_close} subscription so it was downgraded to the GitLab Core Plan."
msgstr ""
@@ -23755,9 +23857,6 @@ msgstr ""
msgid "YouTube"
msgstr ""
-msgid "Your %{strong}%{plan_name}%{strong_close} subscription for %{strong}%{namespace_name}%{strong_close} will expire on %{strong}%{expires_on}%{strong_close}. After that, you will not to be able to create issues or merge requests as well as many other features."
-msgstr ""
-
msgid "Your %{strong}%{plan_name}%{strong_close} subscription will expire on %{strong}%{expires_on}%{strong_close}. After that, you will not to be able to create issues or merge requests as well as many other features."
msgstr ""
diff --git a/package.json b/package.json
index a0abec5643f..a48b2468ea8 100644
--- a/package.json
+++ b/package.json
@@ -43,7 +43,7 @@
"@gitlab/ui": "11.2.1",
"@gitlab/visual-review-tools": "1.5.1",
"@sentry/browser": "^5.10.2",
- "@sourcegraph/code-host-integration": "0.0.34",
+ "@sourcegraph/code-host-integration": "0.0.36",
"apollo-cache-inmemory": "^1.6.3",
"apollo-client": "^2.6.4",
"apollo-link": "^1.2.11",
diff --git a/qa/qa/page/project/settings/ci_variables.rb b/qa/qa/page/project/settings/ci_variables.rb
index 2bb285d6086..6cdf40cd1da 100644
--- a/qa/qa/page/project/settings/ci_variables.rb
+++ b/qa/qa/page/project/settings/ci_variables.rb
@@ -7,75 +7,47 @@ module QA
class CiVariables < Page::Base
include Common
- view 'app/views/ci/variables/_variable_row.html.haml' do
- element :variable_row, '.ci-variable-row-body' # rubocop:disable QA/ElementWithPattern
- element :variable_key, '.qa-ci-variable-input-key' # rubocop:disable QA/ElementWithPattern
- element :variable_value, '.qa-ci-variable-input-value' # rubocop:disable QA/ElementWithPattern
- element :variable_masked
+ view 'app/assets/javascripts/ci_variable_list/components/ci_variable_modal.vue' do
+ element :ci_variable_key_field
+ element :ci_variable_value_field
+ element :ci_variable_masked_checkbox
+ element :ci_variable_save_button
+ element :ci_variable_delete_button
end
- view 'app/views/ci/variables/_index.html.haml' do
- element :save_variables, '.js-ci-variables-save-button' # rubocop:disable QA/ElementWithPattern
- element :reveal_values, '.js-secret-value-reveal-button' # rubocop:disable QA/ElementWithPattern
+ view 'app/assets/javascripts/ci_variable_list/components/ci_variable_table.vue' do
+ element :ci_variable_table_content
+ element :add_ci_variable_button
+ element :edit_ci_variable_button
+ element :reveal_ci_variable_value_button
end
def fill_variable(key, value, masked)
- keys = all_elements(:ci_variable_input_key, minimum: 1)
- index = keys.size - 1
-
- # After we fill the key, JS would generate another field so
- # we need to use the same index to find the corresponding one.
- keys[index].set(key)
- node = all_elements(:ci_variable_input_value, count: keys.size + 1)[index]
-
- # Simply run `node.set(value)` is too slow for long text here,
- # so we need to run JavaScript directly to set the value.
- # The code was inspired from:
- # https://github.com/teamcapybara/capybara/blob/679548cea10773d45e32808f4d964377cfe5e892/lib/capybara/selenium/node.rb#L217
- execute_script("arguments[0].value = #{value.to_json}", node)
-
- masked_node = all_elements(:variable_masked, count: keys.size + 1)[index]
- toggle_masked(masked_node, masked)
- end
-
- def save_variables
- find('.js-ci-variables-save-button').click
- end
-
- def reveal_variables
- find('.js-secret-value-reveal-button').click
+ fill_element :ci_variable_key_field, key
+ fill_element :ci_variable_value_field, value
+ click_ci_variable_save_button
end
- def variable_value(key)
- within('.ci-variable-row-body', text: key) do
- find('.qa-ci-variable-input-value').value
- end
+ def click_add_variable
+ click_element :add_ci_variable_button
end
- def remove_variable(location: :first)
- within('.ci-variable-row-body', match: location) do
- find('button.ci-variable-row-remove-button').click
+ def click_edit_ci_variable
+ within_element(:ci_variable_table_content) do
+ click_element :edit_ci_variable_button
end
-
- save_variables
end
- private
-
- def toggle_masked(masked_node, masked)
- wait_until(reload: false) do
- masked_node.click
-
- masked ? masked_enabled?(masked_node) : masked_disabled?(masked_node)
- end
+ def click_ci_variable_save_button
+ click_element :ci_variable_save_button
end
- def masked_enabled?(masked_node)
- masked_node[:class].include?('is-checked')
+ def click_reveal_ci_variable_value_button
+ click_element :reveal_ci_variable_value_button
end
- def masked_disabled?(masked_node)
- !masked_enabled?(masked_node)
+ def click_ci_variable_delete_button
+ click_element :ci_variable_delete_button
end
end
end
diff --git a/qa/qa/resource/ci_variable.rb b/qa/qa/resource/ci_variable.rb
index b178a64b72d..f14fcdaac9f 100644
--- a/qa/qa/resource/ci_variable.rb
+++ b/qa/qa/resource/ci_variable.rb
@@ -19,9 +19,8 @@ module QA
Page::Project::Settings::CICD.perform do |setting|
setting.expand_ci_variables do |page|
+ page.click_add_variable
page.fill_variable(key, value, masked)
-
- page.save_variables
end
end
end
diff --git a/qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb b/qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb
index cff415dcf97..f7a6c8411db 100644
--- a/qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb
+++ b/qa/qa/specs/features/browser_ui/4_verify/ci_variable/add_remove_ci_variable_spec.rb
@@ -2,7 +2,7 @@
module QA
context 'Verify' do
- describe 'Add or Remove CI variable via UI', :smoke, quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/issues/207915', type: :stale } do
+ describe 'Add or Remove CI variable via UI', :smoke do
let!(:project) do
Resource::Project.fabricate_via_api! do |project|
project.name = 'project-with-ci-variables'
@@ -10,6 +10,14 @@ module QA
end
end
+ before(:all) do
+ Runtime::Feature.enable_and_verify('new_variables_ui')
+ end
+
+ after(:all) do
+ Runtime::Feature.remove('new_variables_ui')
+ end
+
before do
Flow::Login.sign_in
add_ci_variable
@@ -19,12 +27,12 @@ module QA
it 'user adds a CI variable' do
Page::Project::Settings::CICD.perform do |settings|
settings.expand_ci_variables do |page|
- expect(page).to have_field(with: 'VARIABLE_KEY')
- expect(page).not_to have_field(with: 'some_CI_variable')
+ expect(page).to have_text('VARIABLE_KEY')
+ expect(page).not_to have_text('some_CI_variable')
- page.reveal_variables
+ page.click_reveal_ci_variable_value_button
- expect(page).to have_field(with: 'some_CI_variable')
+ expect(page).to have_text('some_CI_variable')
end
end
end
@@ -32,9 +40,10 @@ module QA
it 'user removes a CI variable' do
Page::Project::Settings::CICD.perform do |settings|
settings.expand_ci_variables do |page|
- page.remove_variable
+ page.click_edit_ci_variable
+ page.click_ci_variable_delete_button
- expect(page).not_to have_field(with: 'VARIABLE_KEY')
+ expect(page).not_to have_text('VARIABLE_KEY')
end
end
end
diff --git a/qa/qa/vendor/jenkins/page/last_job_console.rb b/qa/qa/vendor/jenkins/page/last_job_console.rb
index 4c511a8c1f8..f41b91c2cdb 100644
--- a/qa/qa/vendor/jenkins/page/last_job_console.rb
+++ b/qa/qa/vendor/jenkins/page/last_job_console.rb
@@ -14,7 +14,12 @@ module QA
end
def has_successful_build?
- page.has_text?('Finished: SUCCESS')
+ # Retry on errors such as:
+ # Selenium::WebDriver::Error::JavascriptError:
+ # javascript error: this.each is not a function
+ Support::Retrier.retry_on_exception(reload_page: page) do
+ page.has_text?('Finished: SUCCESS')
+ end
end
def no_failed_status_update?
diff --git a/rubocop/cop/performance/ar_count_each.rb b/rubocop/cop/performance/ar_count_each.rb
new file mode 100644
index 00000000000..2fe8e549872
--- /dev/null
+++ b/rubocop/cop/performance/ar_count_each.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+module RuboCop
+ module Cop
+ module Performance
+ class ARCountEach < RuboCop::Cop::Cop
+ def message(ivar)
+ "If #{ivar} is AR relation, avoid `#{ivar}.count ...; #{ivar}.each... `, this will trigger two queries. " \
+ "Use `#{ivar}.load.size ...; #{ivar}.each... ` instead. If #{ivar} is an array, try to use #{ivar}.size."
+ end
+
+ def_node_matcher :count_match, <<~PATTERN
+ (send (ivar $_) :count)
+ PATTERN
+
+ def_node_matcher :each_match, <<~PATTERN
+ (send (ivar $_) :each)
+ PATTERN
+
+ def file_name(node)
+ node.location.expression.source_buffer.name
+ end
+
+ def in_haml_file?(node)
+ file_name(node).end_with?('.haml.rb')
+ end
+
+ def on_send(node)
+ return unless in_haml_file?(node)
+
+ ivar_count = count_match(node)
+ return unless ivar_count
+
+ node.each_ancestor(:begin) do |begin_node|
+ begin_node.each_descendant do |n|
+ ivar_each = each_match(n)
+
+ add_offense(node, location: :expression, message: message(ivar_count)) if ivar_each == ivar_count
+ end
+ end
+ end
+ end
+ end
+ end
+end
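The cop only runs against compiled HAML (files ending in .haml.rb) and flags an instance variable that is both counted and iterated within the same begin block, because on an ActiveRecord relation that pattern issues two queries. A plain-Ruby illustration of the flagged pattern and the rewrite the cop's message suggests; the relation is a placeholder and this is not GitLab view code:

    # Flagged pattern: COUNT(*) query plus a second SELECT for the rows.
    @issues = Issue.where(state: 'opened')   # placeholder relation
    @issues.count
    @issues.each { |issue| issue.title }

    # Preferred per the cop's message: one SELECT, size computed from the loaded rows.
    @issues.load.size
    @issues.each { |issue| issue.title }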
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 4a3d591e94d..2a913069acc 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -748,7 +748,7 @@ describe ApplicationController do
end
end
- describe '#current_user_mode', :do_not_mock_admin_mode do
+ describe '#current_user_mode' do
include_context 'custom session'
controller(described_class) do
diff --git a/spec/controllers/concerns/enforces_admin_authentication_spec.rb b/spec/controllers/concerns/enforces_admin_authentication_spec.rb
index a8494543558..1809bb2d636 100644
--- a/spec/controllers/concerns/enforces_admin_authentication_spec.rb
+++ b/spec/controllers/concerns/enforces_admin_authentication_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-describe EnforcesAdminAuthentication, :do_not_mock_admin_mode do
+describe EnforcesAdminAuthentication do
include AdminModeHelper
let(:user) { create(:user) }
diff --git a/spec/controllers/groups/settings/ci_cd_controller_spec.rb b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
index b5154f4f877..b2ae16e0ee6 100644
--- a/spec/controllers/groups/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/groups/settings/ci_cd_controller_spec.rb
@@ -180,32 +180,38 @@ describe Groups::Settings::CiCdController do
group.add_owner(user)
end
- it { is_expected.to redirect_to(group_settings_ci_cd_path) }
+ context 'when admin mode is disabled' do
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
- context 'when service execution went wrong' do
- let(:update_service) { double }
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { is_expected.to redirect_to(group_settings_ci_cd_path) }
- before do
- allow(Groups::UpdateService).to receive(:new).and_return(update_service)
- allow(update_service).to receive(:execute).and_return(false)
- allow_any_instance_of(Group).to receive_message_chain(:errors, :full_messages)
- .and_return(['Error 1'])
+ context 'when service execution went wrong' do
+ let(:update_service) { double }
- subject
- end
+ before do
+ allow(Groups::UpdateService).to receive(:new).and_return(update_service)
+ allow(update_service).to receive(:execute).and_return(false)
+ allow_any_instance_of(Group).to receive_message_chain(:errors, :full_messages)
+ .and_return(['Error 1'])
- it 'returns a flash alert' do
- expect(response).to set_flash[:alert]
- .to eq("There was a problem updating the pipeline settings: [\"Error 1\"].")
+ subject
+ end
+
+ it 'returns a flash alert' do
+ expect(response).to set_flash[:alert]
+ .to eq("There was a problem updating the pipeline settings: [\"Error 1\"].")
+ end
end
- end
- context 'when service execution was successful' do
- it 'returns a flash notice' do
- subject
+ context 'when service execution was successful' do
+ it 'returns a flash notice' do
+ subject
- expect(response).to set_flash[:notice]
- .to eq('Pipeline settings was updated for the group')
+ expect(response).to set_flash[:notice]
+ .to eq('Pipeline settings was updated for the group')
+ end
end
end
end
diff --git a/spec/controllers/projects/clusters/applications_controller_spec.rb b/spec/controllers/projects/clusters/applications_controller_spec.rb
index 8dcbf575627..6de3593be28 100644
--- a/spec/controllers/projects/clusters/applications_controller_spec.rb
+++ b/spec/controllers/projects/clusters/applications_controller_spec.rb
@@ -10,7 +10,12 @@ describe Projects::Clusters::ApplicationsController do
end
shared_examples 'a secure endpoint' do
- it { expect { subject }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { subject }.to be_allowed_for(:admin)
+ end
+ it 'is denied for admin when admin mode disabled' do
+ expect { subject }.to be_denied_for(:admin)
+ end
it { expect { subject }.to be_allowed_for(:owner).of(project) }
it { expect { subject }.to be_allowed_for(:maintainer).of(project) }
it { expect { subject }.to be_denied_for(:developer).of(project) }
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index a5683a27837..07733ec30d9 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -65,7 +65,12 @@ describe Projects::ClustersController do
describe 'security' do
let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -151,7 +156,12 @@ describe Projects::ClustersController do
end
describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -240,7 +250,12 @@ describe Projects::ClustersController do
allow(WaitForClusterCreationWorker).to receive(:perform_in).and_return(nil)
end
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -346,7 +361,12 @@ describe Projects::ClustersController do
stub_kubeclient_get_namespace('https://kubernetes.example.com', namespace: 'my-namespace')
end
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -414,7 +434,12 @@ describe Projects::ClustersController do
allow(WaitForClusterCreationWorker).to receive(:perform_in)
end
- it { expect { post_create_aws }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { post_create_aws }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { post_create_aws }.to be_denied_for(:admin)
+ end
it { expect { post_create_aws }.to be_allowed_for(:owner).of(project) }
it { expect { post_create_aws }.to be_allowed_for(:maintainer).of(project) }
it { expect { post_create_aws }.to be_denied_for(:developer).of(project) }
@@ -469,7 +494,12 @@ describe Projects::ClustersController do
end
describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -501,7 +531,12 @@ describe Projects::ClustersController do
end
describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -541,7 +576,12 @@ describe Projects::ClustersController do
end
describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -574,7 +614,12 @@ describe Projects::ClustersController do
end
describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -677,7 +722,12 @@ describe Projects::ClustersController do
describe 'security' do
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
@@ -746,7 +796,12 @@ describe Projects::ClustersController do
describe 'security' do
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) }
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is disabled for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_denied_for(:developer).of(project) }
diff --git a/spec/controllers/projects/deploy_keys_controller_spec.rb b/spec/controllers/projects/deploy_keys_controller_spec.rb
index a97f9ebf36b..a6bbe6bd012 100644
--- a/spec/controllers/projects/deploy_keys_controller_spec.rb
+++ b/spec/controllers/projects/deploy_keys_controller_spec.rb
@@ -163,7 +163,7 @@ describe Projects::DeployKeysController do
end
end
- context 'with admin' do
+ context 'with admin', :enable_admin_mode do
before do
sign_in(admin)
end
@@ -228,7 +228,7 @@ describe Projects::DeployKeysController do
end
end
- context 'with admin' do
+ context 'with admin', :enable_admin_mode do
before do
sign_in(admin)
end
@@ -284,7 +284,7 @@ describe Projects::DeployKeysController do
end
end
- context 'with admin' do
+ context 'with admin', :enable_admin_mode do
before do
sign_in(admin)
end
@@ -311,8 +311,16 @@ describe Projects::DeployKeysController do
context 'public deploy key attached to project' do
let(:extra_params) { deploy_key_params('updated title', '1') }
- it 'updates the title of the deploy key' do
- expect { subject }.to change { deploy_key.reload.title }.to('updated title')
+ context 'admin mode disabled' do
+ it 'does not update the title of the deploy key' do
+ expect { subject }.not_to change { deploy_key.reload.title }
+ end
+ end
+
+ context 'admin mode enabled', :enable_admin_mode do
+ it 'updates the title of the deploy key' do
+ expect { subject }.to change { deploy_key.reload.title }.to('updated title')
+ end
end
it 'updates can_push of deploy_keys_project' do
diff --git a/spec/controllers/projects/import/jira_controller_spec.rb b/spec/controllers/projects/import/jira_controller_spec.rb
index 8e0d506e5e4..4629aab65dd 100644
--- a/spec/controllers/projects/import/jira_controller_spec.rb
+++ b/spec/controllers/projects/import/jira_controller_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
describe Projects::Import::JiraController do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+ let_it_be(:jira_project_key) { 'Test' }
context 'with anonymous user' do
before do
@@ -21,7 +22,7 @@ describe Projects::Import::JiraController do
context 'post import' do
it 'redirects to issues page' do
- post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'Test' }
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: jira_project_key }
expect(response).to redirect_to(new_user_session_path)
end
@@ -49,7 +50,7 @@ describe Projects::Import::JiraController do
context 'post import' do
it 'redirects to issues page' do
- post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'Test' }
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: jira_project_key }
expect(response).to redirect_to(project_issues_path(project))
end
@@ -65,12 +66,64 @@ describe Projects::Import::JiraController do
context 'when jira service is enabled for the project' do
let_it_be(:jira_service) { create(:jira_service, project: project) }
+ context 'when user is developer' do
+ let_it_be(:dev) { create(:user) }
+
+ before do
+ sign_in(dev)
+ project.add_developer(dev)
+ end
+
+ context 'get show' do
+ before do
+ get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
+ end
+
+ it 'does not query jira service' do
+ expect(project).not_to receive(:jira_service)
+ end
+
+ it 'renders show template' do
+ expect(response).to render_template(:show)
+ expect(assigns(:jira_projects)).not_to be_present
+ end
+ end
+
+ context 'post import' do
+ it 'returns 404' do
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: jira_project_key }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ context 'when issues are disabled' do
+ let_it_be(:disabled_issues_project) { create(:project, :public, :issues_disabled) }
+
+ context 'get show' do
+ it 'returns 404' do
+ get :show, params: { namespace_id: project.namespace.to_param, project_id: disabled_issues_project }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'post import' do
+ it 'returns 404' do
+ post :import, params: { namespace_id: disabled_issues_project.namespace, project_id: disabled_issues_project, jira_project_key: jira_project_key }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
context 'when running jira import first time' do
context 'get show' do
before do
allow(JIRA::Resource::Project).to receive(:all).and_return(jira_projects)
- expect(project.import_state).to be_nil
+ expect(project.jira_imports).to be_empty
get :show, params: { namespace_id: project.namespace.to_param, project_id: project }
end
@@ -84,7 +137,7 @@ describe Projects::Import::JiraController do
end
end
- context 'when everything is ok' do
+ context 'when projects are retrieved from Jira' do
let(:jira_projects) { [double(name: 'FOO project', key: 'FOO')] }
it 'renders show template' do
@@ -107,14 +160,14 @@ describe Projects::Import::JiraController do
it 'creates import state' do
expect(project.latest_jira_import).to be_nil
- post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: 'Test' }
+ post :import, params: { namespace_id: project.namespace, project_id: project, jira_project_key: jira_project_key }
project.reload
jira_import = project.latest_jira_import
expect(project.import_type).to eq 'jira'
expect(jira_import.status).to eq 'scheduled'
- expect(jira_import.jira_project_key).to eq 'Test'
+ expect(jira_import.jira_project_key).to eq jira_project_key
expect(response).to redirect_to(project_import_jira_path(project))
end
end
@@ -145,7 +198,7 @@ describe Projects::Import::JiraController do
end
context 'when jira import ran before' do
- let_it_be(:jira_import_state) { create(:jira_import_state, :finished, project: project, jira_project_key: 'Test') }
+ let_it_be(:jira_import_state) { create(:jira_import_state, :finished, project: project, jira_project_key: jira_project_key) }
context 'get show' do
it 'renders import status' do
@@ -164,7 +217,7 @@ describe Projects::Import::JiraController do
project.reload
expect(project.latest_jira_import.status).to eq 'scheduled'
expect(project.jira_imports.size).to eq 2
- expect(project.jira_imports.first.jira_project_key).to eq 'Test'
+ expect(project.jira_imports.first.jira_project_key).to eq jira_project_key
expect(project.jira_imports.last.jira_project_key).to eq 'New Project'
expect(response).to redirect_to(project_import_jira_path(project))
end
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 74ed4a0f991..9526e14a748 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -586,12 +586,23 @@ describe Projects::IssuesController do
expect(assigns(:issues)).to include request_forgery_timing_attack
end
- it 'lists confidential issues for admin' do
- sign_in(admin)
- get_issues
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'lists confidential issues for admin' do
+ sign_in(admin)
+ get_issues
- expect(assigns(:issues)).to include unescaped_parameter_value
- expect(assigns(:issues)).to include request_forgery_timing_attack
+ expect(assigns(:issues)).to include unescaped_parameter_value
+ expect(assigns(:issues)).to include request_forgery_timing_attack
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'does not list confidential issues for admin' do
+ sign_in(admin)
+ get_issues
+
+ expect(assigns(:issues)).to eq [issue]
+ end
end
def get_issues
@@ -648,11 +659,22 @@ describe Projects::IssuesController do
expect(response).to have_gitlab_http_status http_status[:success]
end
- it "returns #{http_status[:success]} for admin" do
- sign_in(admin)
- go(id: unescaped_parameter_value.to_param)
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it "returns #{http_status[:success]} for admin" do
+ sign_in(admin)
+ go(id: unescaped_parameter_value.to_param)
- expect(response).to have_gitlab_http_status http_status[:success]
+ expect(response).to have_gitlab_http_status http_status[:success]
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ xit 'returns 404 for admin' do
+ sign_in(admin)
+ go(id: unescaped_parameter_value.to_param)
+
+ expect(response).to have_gitlab_http_status :not_found
+ end
end
end
@@ -1085,6 +1107,48 @@ describe Projects::IssuesController do
expect { subject }.to change(SentryIssue, :count)
end
end
+
+ context 'when the endpoint receives requests above the limit' do
+ before do
+ stub_application_setting(issues_create_limit: 5)
+ end
+
+ it 'prevents creating more issues', :request_store do
+ 5.times { post_new_issue }
+
+ expect { post_new_issue }
+ .to change { Gitlab::GitalyClient.get_request_count }.by(1) # creates 1 project and 0 issues
+
+ post_new_issue
+ expect(response.body).to eq(_('This endpoint has been requested too many times. Try again later.'))
+ expect(response).to have_gitlab_http_status(:too_many_requests)
+ end
+
+ it 'logs the event on auth.log' do
+ attributes = {
+ message: 'Application_Rate_Limiter_Request',
+ env: :issues_create_request_limit,
+ remote_ip: '0.0.0.0',
+ request_method: 'POST',
+ path: "/#{project.full_path}/-/issues",
+ user_id: user.id,
+ username: user.username
+ }
+
+ expect(Gitlab::AuthLogger).to receive(:error).with(attributes).once
+
+ project.add_developer(user)
+ sign_in(user)
+
+ 6.times do
+ post :create, params: {
+ namespace_id: project.namespace.to_param,
+ project_id: project,
+ issue: { title: 'Title', description: 'Description' }
+ }
+ end
+ end
+ end
end
describe 'POST #mark_as_spam' do
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index 0071e6c8a19..ef1253edda5 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -391,10 +391,20 @@ describe Projects::JobsController, :clean_gitlab_redis_shared_state do
sign_in(user)
end
- it 'settings_path is available' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('job/job_details')
- expect(json_response['runners']['settings_path']).to match(/runners/)
+ context 'when admin mode is disabled' do
+ it 'settings_path is not available' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('job/job_details')
+ expect(json_response['runners']).not_to have_key('settings_path')
+ end
+ end
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'settings_path is available' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('job/job_details')
+ expect(json_response['runners']['settings_path']).to match(/runners/)
+ end
end
end
end
diff --git a/spec/controllers/projects/mirrors_controller_spec.rb b/spec/controllers/projects/mirrors_controller_spec.rb
index 3579e4aa2cf..faeade0d737 100644
--- a/spec/controllers/projects/mirrors_controller_spec.rb
+++ b/spec/controllers/projects/mirrors_controller_spec.rb
@@ -39,12 +39,24 @@ describe Projects::MirrorsController do
expect(response).to have_gitlab_http_status(:not_found)
end
- it 'allows requests from an admin user' do
- user.update!(admin: true)
- sign_in(user)
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it 'allows requests from an admin user' do
+ user.update!(admin: true)
+ sign_in(user)
- subject_action
- expect(response).to redirect_to(project_settings_path)
+ subject_action
+ expect(response).to redirect_to(project_settings_path)
+ end
+ end
+
+ context 'when admin mode is disabled' do
+ it 'disallows requests from an admin user' do
+ user.update!(admin: true)
+ sign_in(user)
+
+ subject_action
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
end
end
diff --git a/spec/controllers/projects/pages_domains_controller_spec.rb b/spec/controllers/projects/pages_domains_controller_spec.rb
index ef5e831d26c..c78c5fe2886 100644
--- a/spec/controllers/projects/pages_domains_controller_spec.rb
+++ b/spec/controllers/projects/pages_domains_controller_spec.rb
@@ -181,6 +181,24 @@ describe Projects::PagesDomainsController do
end
end
+ describe 'POST retry_auto_ssl' do
+ before do
+ pages_domain.update!(auto_ssl_enabled: true, auto_ssl_failed: true)
+ end
+
+ let(:params) { request_params.merge(id: pages_domain.domain) }
+
+ it 'calls retry service and redirects' do
+ expect_next_instance_of(PagesDomains::RetryAcmeOrderService, pages_domain) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ post :retry_auto_ssl, params: params
+
+ expect(response).to redirect_to project_pages_domain_path(project, pages_domain)
+ end
+ end
+
describe 'DELETE destroy' do
it "deletes the pages domain" do
expect do
diff --git a/spec/controllers/projects/pipeline_schedules_controller_spec.rb b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
index 72b282429e9..635980ba93b 100644
--- a/spec/controllers/projects/pipeline_schedules_controller_spec.rb
+++ b/spec/controllers/projects/pipeline_schedules_controller_spec.rb
@@ -127,7 +127,12 @@ describe Projects::PipelineSchedulesController do
describe 'security' do
let(:schedule) { attributes_for(:ci_pipeline_schedule) }
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is denied for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_allowed_for(:developer).of(project) }
@@ -279,7 +284,12 @@ describe Projects::PipelineSchedulesController do
describe 'security' do
let(:schedule) { { description: 'updated_desc' } }
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is denied for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_allowed_for(:developer).of(project).own(pipeline_schedule) }
@@ -343,7 +353,12 @@ describe Projects::PipelineSchedulesController do
end
describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is denied for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_allowed_for(:developer).of(project).own(pipeline_schedule) }
@@ -361,7 +376,12 @@ describe Projects::PipelineSchedulesController do
describe 'GET #take_ownership' do
describe 'security' do
- it { expect { go }.to be_allowed_for(:admin) }
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { go }.to be_allowed_for(:admin)
+ end
+ it 'is denied for admin when admin mode disabled' do
+ expect { go }.to be_denied_for(:admin)
+ end
it { expect { go }.to be_allowed_for(:owner).of(project) }
it { expect { go }.to be_allowed_for(:maintainer).of(project) }
it { expect { go }.to be_allowed_for(:developer).of(project).own(pipeline_schedule) }
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index 3684a1bb8d8..0facef85985 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -245,11 +245,22 @@ describe Projects::Settings::CiCdController do
context 'and user is an admin' do
let(:user) { create(:admin) }
- it 'sets max_artifacts_size' do
- subject
+ context 'with admin mode disabled' do
+ it 'does not set max_artifacts_size' do
+ subject
- project.reload
- expect(project.max_artifacts_size).to eq(10)
+ project.reload
+ expect(project.max_artifacts_size).to be_nil
+ end
+ end
+
+ context 'with admin mode enabled', :enable_admin_mode do
+ it 'sets max_artifacts_size' do
+ subject
+
+ project.reload
+ expect(project.max_artifacts_size).to eq(10)
+ end
end
end
end
diff --git a/spec/controllers/projects/static_site_editor_controller_spec.rb b/spec/controllers/projects/static_site_editor_controller_spec.rb
index 7f1b67fc734..d1224bb75c0 100644
--- a/spec/controllers/projects/static_site_editor_controller_spec.rb
+++ b/spec/controllers/projects/static_site_editor_controller_spec.rb
@@ -10,7 +10,8 @@ describe Projects::StaticSiteEditorController do
{
namespace_id: project.namespace,
project_id: project,
- id: 'master/README.md'
+ id: 'master/README.md',
+ return_url: 'http://example.com'
}
end
@@ -38,6 +39,18 @@ describe Projects::StaticSiteEditorController do
it 'renders the edit page' do
expect(response).to render_template(:show)
end
+
+ it 'assigns a config variable' do
+ expect(assigns(:config)).to be_a(Gitlab::StaticSiteEditor::Config)
+ end
+
+ context 'when combination of ref and file path is incorrect' do
+ let(:default_params) { super().merge(id: 'unknown') }
+
+ it 'responds with 404 page' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
end
end
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index d0e0dabc9f2..fc3efc8e805 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -362,7 +362,7 @@ describe ProjectsController do
end
describe 'GET edit' do
- it 'allows an admin user to access the page' do
+ it 'allows an admin user to access the page', :enable_admin_mode do
sign_in(create(:user, :admin))
get :edit,
@@ -531,7 +531,7 @@ describe ProjectsController do
end
end
- describe "#update" do
+ describe "#update", :enable_admin_mode do
render_views
let(:admin) { create(:admin) }
@@ -672,7 +672,7 @@ describe ProjectsController do
end
end
- describe '#transfer' do
+ describe '#transfer', :enable_admin_mode do
render_views
let(:project) { create(:project, :repository) }
@@ -720,7 +720,7 @@ describe ProjectsController do
end
end
- describe "#destroy" do
+ describe "#destroy", :enable_admin_mode do
let(:admin) { create(:admin) }
it "redirects to the dashboard", :sidekiq_might_not_need_inline do
@@ -1094,7 +1094,7 @@ describe ProjectsController do
end
end
- context 'for a DELETE request' do
+ context 'for a DELETE request', :enable_admin_mode do
before do
sign_in(create(:admin))
end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 82383cfa2b0..a259c5142fc 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -13,7 +13,7 @@ FactoryBot.define do
end
trait :remote_store do
- file_store { JobArtifactUploader::Store::REMOTE}
+ file_store { JobArtifactUploader::Store::REMOTE }
end
after :build do |artifact|
diff --git a/spec/factories/ci/pipelines.rb b/spec/factories/ci/pipelines.rb
index e0478097148..257dd3337ba 100644
--- a/spec/factories/ci/pipelines.rb
+++ b/spec/factories/ci/pipelines.rb
@@ -5,7 +5,7 @@ FactoryBot.define do
factory :ci_empty_pipeline, class: 'Ci::Pipeline' do
source { :push }
ref { 'master' }
- sha { '97de212e80737a608d939f648d959671fb0a0142' }
+ sha { 'b83d6e391c22777fca1ed3012fce84f633d7fed0' }
status { 'pending' }
add_attribute(:protected) { false }
diff --git a/spec/factories/deploy_tokens.rb b/spec/factories/deploy_tokens.rb
index e86d4ab8812..657915f9976 100644
--- a/spec/factories/deploy_tokens.rb
+++ b/spec/factories/deploy_tokens.rb
@@ -7,6 +7,7 @@ FactoryBot.define do
sequence(:name) { |n| "PDT #{n}" }
read_repository { true }
read_registry { true }
+ write_registry { false }
revoked { false }
expires_at { 5.days.from_now }
deploy_token_type { DeployToken.deploy_token_types[:project_type] }
diff --git a/spec/factories/diff_note_positions.rb b/spec/factories/diff_note_positions.rb
new file mode 100644
index 00000000000..6e95e306d50
--- /dev/null
+++ b/spec/factories/diff_note_positions.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :diff_note_position do
+ association :note, factory: :diff_note_on_merge_request
+ line_code { note.line_code }
+ position { note.position }
+ diff_type { :head }
+ end
+end
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 73f759f8a54..218cbf871a9 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -244,13 +244,15 @@ describe 'Dashboard Projects' do
ActiveRecord::QueryRecorder.new { visit dashboard_projects_path }.count
- # There are three known N+1 queries:
+ # There are seven known N+1 queries: https://gitlab.com/gitlab-org/gitlab/-/issues/214037
# 1. Project#open_issues_count
# 2. Project#open_merge_requests_count
# 3. Project#forks_count
- #
- # In addition, ProjectsHelper#load_pipeline_status also adds an
- # additional query.
- expect { visit dashboard_projects_path }.not_to exceed_query_limit(control_count + 4)
+ # 4. ProjectsHelper#load_pipeline_status
+ # 5. RendersMemberAccess#preload_max_member_access_for_collection
+ # 6. User#max_member_access_for_project_ids
+ # 7. CommitWithPipeline#last_pipeline
+
+ expect { visit dashboard_projects_path }.not_to exceed_query_limit(control_count + 7)
end
end
diff --git a/spec/features/dashboard/shortcuts_spec.rb b/spec/features/dashboard/shortcuts_spec.rb
index 2cd9cbc4471..6907c681417 100644
--- a/spec/features/dashboard/shortcuts_spec.rb
+++ b/spec/features/dashboard/shortcuts_spec.rb
@@ -26,6 +26,10 @@ describe 'Dashboard shortcuts', :js do
check_page_title('To-Do List')
+ find('body').send_keys([:shift, 'G'])
+
+ check_page_title('Groups')
+
find('body').send_keys([:shift, 'P'])
check_page_title('Projects')
diff --git a/spec/features/ide/user_commits_changes_spec.rb b/spec/features/ide/user_commits_changes_spec.rb
new file mode 100644
index 00000000000..f53abde1523
--- /dev/null
+++ b/spec/features/ide/user_commits_changes_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe 'IDE user commits changes', :js do
+ include WebIdeSpecHelpers
+
+ let(:project) { create(:project, :public, :repository) }
+ let(:user) { project.owner }
+
+ before do
+ sign_in(user)
+
+ ide_visit(project)
+ end
+
+ it 'user updates nested files' do
+ content = <<~HEREDOC
+ Lorem ipsum
+ Dolar sit
+ Amit
+ HEREDOC
+
+ ide_create_new_file('foo/bar/lorem_ipsum.md', content: content)
+ ide_delete_file('foo/bar/.gitkeep')
+
+ ide_commit
+
+ expect(page).to have_content('All changes are committed')
+ expect(project.repository.blob_at('master', 'foo/bar/.gitkeep')).to be_nil
+ expect(project.repository.blob_at('master', 'foo/bar/lorem_ipsum.md').data).to eql(content)
+ end
+end
diff --git a/spec/features/projects/environments/environments_spec.rb b/spec/features/projects/environments/environments_spec.rb
index d7f12411a93..cee9b6d50ba 100644
--- a/spec/features/projects/environments/environments_spec.rb
+++ b/spec/features/projects/environments/environments_spec.rb
@@ -399,10 +399,12 @@ describe 'Environments page', :js do
describe 'environments folders' do
before do
- create(:environment, project: project,
+ create(:environment, :will_auto_stop,
+ project: project,
name: 'staging/review-1',
state: :available)
- create(:environment, project: project,
+ create(:environment, :will_auto_stop,
+ project: project,
name: 'staging/review-2',
state: :available)
end
@@ -420,6 +422,14 @@ describe 'Environments page', :js do
expect(page).to have_content 'review-1'
expect(page).to have_content 'review-2'
+ within('.ci-table') do
+ within('.gl-responsive-table-row:nth-child(3)') do
+ expect(find('.js-auto-stop').text).not_to be_empty
+ end
+ within('.gl-responsive-table-row:nth-child(4)') do
+ expect(find('.js-auto-stop').text).not_to be_empty
+ end
+ end
end
end
diff --git a/spec/features/projects/pages_lets_encrypt_spec.rb b/spec/features/projects/pages_lets_encrypt_spec.rb
index 4f9c1903344..da9b191271a 100644
--- a/spec/features/projects/pages_lets_encrypt_spec.rb
+++ b/spec/features/projects/pages_lets_encrypt_spec.rb
@@ -85,6 +85,22 @@ describe "Pages with Let's Encrypt", :https_pages_enabled do
end
end
+ context "when we failed to obtain Let's Encrypt certificate", :js do
+ let(:domain) do
+ create(:pages_domain, auto_ssl_enabled: true, auto_ssl_failed: true, project: project)
+ end
+
+ it 'user can retry obtaining certificate' do
+ visit project_pages_domain_path(project, domain)
+
+ expect(page).to have_text("Something went wrong while obtaining the Let's Encrypt certificate.")
+
+ click_on('Retry')
+
+ expect(page).to have_text("GitLab is obtaining a Let's Encrypt SSL certificate for this domain. This process can take some time. Please try again later.")
+ end
+ end
+
shared_examples 'user sees private keys only for user provided certificate' do
shared_examples 'user do not see private key' do
it 'user do not see private key' do
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 561c0552007..e8846b5b617 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -133,15 +133,8 @@ describe 'Pipeline', :js do
context 'when there are two related merge requests' do
before do
- create(:merge_request,
- source_project: project,
- source_branch: pipeline.ref,
- target_branch: 'feature-1')
-
- create(:merge_request,
- source_project: project,
- source_branch: pipeline.ref,
- target_branch: 'feature-2')
+ create(:merge_request, source_project: project, source_branch: pipeline.ref)
+ create(:merge_request, source_project: project, source_branch: pipeline.ref, target_branch: 'fix')
end
it 'links to the most recent related merge request' do
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index fdefa16ac19..f34c2fb69eb 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -651,7 +651,7 @@ describe('Api', () => {
describe('when an error occurs while getting a raw file', () => {
it('rejects the Promise', () => {
- mock.onDelete(expectedUrl).replyOnce(500);
+ mock.onPost(expectedUrl).replyOnce(500);
return Api.getRawFile(dummyProjectPath, dummyFilePath).catch(() => {
expect(mock.history.get).toHaveLength(1);
@@ -659,4 +659,36 @@ describe('Api', () => {
});
});
});
+
+ describe('createProjectMergeRequest', () => {
+ const dummyProjectPath = 'gitlab-org/gitlab';
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${encodeURIComponent(
+ dummyProjectPath,
+ )}/merge_requests`;
+ const options = {
+ source_branch: 'feature',
+ target_branch: 'master',
+ title: 'Add feature',
+ };
+
+ describe('when the merge request is successfully created', () => {
+ it('resolves the Promise', () => {
+ mock.onPost(expectedUrl, options).replyOnce(201);
+
+ return Api.createProjectMergeRequest(dummyProjectPath, options).then(() => {
+ expect(mock.history.post).toHaveLength(1);
+ });
+ });
+ });
+
+ describe('when an error occurs while creating the merge request', () => {
+ it('rejects the Promise', () => {
+ mock.onPost(expectedUrl).replyOnce(500);
+
+ return Api.createProjectMergeRequest(dummyProjectPath).catch(() => {
+ expect(mock.history.post).toHaveLength(1);
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/ide/components/repo_commit_section_spec.js b/spec/frontend/ide/components/repo_commit_section_spec.js
new file mode 100644
index 00000000000..5ea03eb1593
--- /dev/null
+++ b/spec/frontend/ide/components/repo_commit_section_spec.js
@@ -0,0 +1,134 @@
+import { mount } from '@vue/test-utils';
+import { createStore } from '~/ide/stores';
+import router from '~/ide/ide_router';
+import RepoCommitSection from '~/ide/components/repo_commit_section.vue';
+import { stageKeys } from '~/ide/constants';
+import { file } from '../helpers';
+
+const TEST_NO_CHANGES_SVG = 'nochangessvg';
+
+describe('RepoCommitSection', () => {
+ let wrapper;
+ let store;
+
+ function createComponent() {
+ wrapper = mount(RepoCommitSection, { store });
+ }
+
+ function setupDefaultState() {
+ store.state.noChangesStateSvgPath = 'svg';
+ store.state.committedStateSvgPath = 'commitsvg';
+ store.state.currentProjectId = 'abcproject';
+ store.state.currentBranchId = 'master';
+ store.state.projects.abcproject = {
+ web_url: '',
+ branches: {
+ master: {
+ workingReference: '1',
+ },
+ },
+ };
+
+ const files = [file('file1'), file('file2')].map(f =>
+ Object.assign(f, {
+ type: 'blob',
+ content: 'original content',
+ }),
+ );
+
+ store.state.rightPanelCollapsed = false;
+ store.state.currentBranch = 'master';
+ store.state.changedFiles = [];
+ store.state.stagedFiles = [{ ...files[0] }, { ...files[1] }];
+ store.state.stagedFiles.forEach(f =>
+ Object.assign(f, {
+ changed: true,
+ staged: true,
+ content: 'testing',
+ }),
+ );
+
+ files.forEach(f => {
+ store.state.entries[f.path] = f;
+ });
+ }
+
+ beforeEach(() => {
+ store = createStore();
+
+ jest.spyOn(store, 'dispatch');
+ jest.spyOn(router, 'push').mockImplementation();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('empty Stage', () => {
+ beforeEach(() => {
+ store.state.noChangesStateSvgPath = TEST_NO_CHANGES_SVG;
+ store.state.committedStateSvgPath = 'svg';
+
+ createComponent();
+ });
+
+ it('renders no changes text', () => {
+ expect(
+ wrapper
+ .find('.js-empty-state')
+ .text()
+ .trim(),
+ ).toContain('No changes');
+ expect(wrapper.find('.js-empty-state img').attributes('src')).toBe(TEST_NO_CHANGES_SVG);
+ });
+ });
+
+ describe('default', () => {
+ beforeEach(() => {
+ setupDefaultState();
+
+ createComponent();
+ });
+
+ it('opens last opened file', () => {
+ expect(store.state.openFiles.length).toBe(1);
+ expect(store.state.openFiles[0].pending).toBe(true);
+ });
+
+ it('calls openPendingTab', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('openPendingTab', {
+ file: store.getters.lastOpenedFile,
+ keyPrefix: stageKeys.staged,
+ });
+ });
+
+ it('renders a commit section', () => {
+ const allFiles = store.state.changedFiles.concat(store.state.stagedFiles);
+ const changedFileNames = wrapper
+ .findAll('.multi-file-commit-list > li')
+ .wrappers.map(x => x.text().trim());
+
+ expect(changedFileNames).toEqual(allFiles.map(x => x.path));
+ });
+ });
+
+ describe('with unstaged file', () => {
+ beforeEach(() => {
+ setupDefaultState();
+
+ store.state.changedFiles = store.state.stagedFiles.map(x =>
+ Object.assign(x, { staged: false }),
+ );
+ store.state.stagedFiles = [];
+
+ createComponent();
+ });
+
+ it('calls openPendingTab with unstaged prefix', () => {
+ expect(store.dispatch).toHaveBeenCalledWith('openPendingTab', {
+ file: store.getters.lastOpenedFile,
+ keyPrefix: stageKeys.unstaged,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/text_utility_spec.js b/spec/frontend/lib/utils/text_utility_spec.js
index dc8f6c64136..4969c591dcd 100644
--- a/spec/frontend/lib/utils/text_utility_spec.js
+++ b/spec/frontend/lib/utils/text_utility_spec.js
@@ -224,4 +224,18 @@ describe('text_utility', () => {
});
});
});
+
+ describe('hasContent', () => {
+ it.each`
+ txt | result
+ ${null} | ${false}
+ ${undefined} | ${false}
+ ${{ an: 'object' }} | ${false}
+ ${''} | ${false}
+ ${' \t\r\n'} | ${false}
+ ${'hello'} | ${true}
+ `('returns $result for input $txt', ({ result, txt }) => {
+ expect(textUtils.hasContent(txt)).toEqual(result);
+ });
+ });
});
diff --git a/spec/frontend/logs/components/environment_logs_spec.js b/spec/frontend/logs/components/environment_logs_spec.js
index befcd462828..9046253bdc6 100644
--- a/spec/frontend/logs/components/environment_logs_spec.js
+++ b/spec/frontend/logs/components/environment_logs_spec.js
@@ -10,7 +10,6 @@ import {
mockPods,
mockLogsResult,
mockTrace,
- mockPodName,
mockEnvironmentsEndpoint,
mockDocumentationPath,
} from '../mock_data';
@@ -43,7 +42,7 @@ describe('EnvironmentLogs', () => {
const findSimpleFilters = () => wrapper.find({ ref: 'log-simple-filters' });
const findAdvancedFilters = () => wrapper.find({ ref: 'log-advanced-filters' });
- const findInfoAlert = () => wrapper.find('.js-elasticsearch-alert');
+ const findElasticsearchNotice = () => wrapper.find({ ref: 'elasticsearchNotice' });
const findLogControlButtons = () => wrapper.find({ name: 'log-control-buttons-stub' });
const findInfiniteScroll = () => wrapper.find({ ref: 'infiniteScroll' });
@@ -160,6 +159,10 @@ describe('EnvironmentLogs', () => {
initWrapper();
});
+ it('does not display an alert to upgrade to ES', () => {
+ expect(findElasticsearchNotice().exists()).toBe(false);
+ });
+
it('displays a disabled environments dropdown', () => {
expect(findEnvironmentsDropdown().attributes('disabled')).toBe('true');
expect(findEnvironmentsDropdown().findAll(GlDropdownItem).length).toBe(0);
@@ -204,7 +207,7 @@ describe('EnvironmentLogs', () => {
});
it('displays an alert to upgrade to ES', () => {
- expect(findInfoAlert().exists()).toBe(true);
+ expect(findElasticsearchNotice().exists()).toBe(true);
});
it('displays simple filters for kubernetes logs API', () => {
@@ -235,7 +238,7 @@ describe('EnvironmentLogs', () => {
});
it('does not display an alert to upgrade to ES', () => {
- expect(findInfoAlert().exists()).toBe(false);
+ expect(findElasticsearchNotice().exists()).toBe(false);
});
it('populates environments dropdown', () => {
@@ -298,11 +301,11 @@ describe('EnvironmentLogs', () => {
});
it('refresh button, trace is refreshed', () => {
- expect(dispatch).not.toHaveBeenCalledWith(`${module}/showPodLogs`, expect.anything());
+ expect(dispatch).not.toHaveBeenCalledWith(`${module}/fetchLogs`, undefined);
findLogControlButtons().vm.$emit('refresh');
- expect(dispatch).toHaveBeenCalledWith(`${module}/showPodLogs`, mockPodName);
+ expect(dispatch).toHaveBeenCalledWith(`${module}/fetchLogs`, undefined);
});
});
});
diff --git a/spec/frontend/logs/components/log_advanced_filters_spec.js b/spec/frontend/logs/components/log_advanced_filters_spec.js
index a6fbc40c2c6..adcd6b4fb07 100644
--- a/spec/frontend/logs/components/log_advanced_filters_spec.js
+++ b/spec/frontend/logs/components/log_advanced_filters_spec.js
@@ -1,8 +1,9 @@
-import { GlIcon, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { defaultTimeRange } from '~/vue_shared/constants';
+import { GlFilteredSearch } from '@gitlab/ui';
import { convertToFixedRange } from '~/lib/utils/datetime_range';
import { createStore } from '~/logs/stores';
+import { TOKEN_TYPE_POD_NAME } from '~/logs/constants';
import { mockPods, mockSearch } from '../mock_data';
import LogAdvancedFilters from '~/logs/components/log_advanced_filters.vue';
@@ -15,26 +16,19 @@ describe('LogAdvancedFilters', () => {
let wrapper;
let state;
- const findPodsDropdown = () => wrapper.find({ ref: 'podsDropdown' });
- const findPodsNoPodsText = () => wrapper.find({ ref: 'noPodsMsg' });
- const findPodsDropdownItems = () =>
- findPodsDropdown()
- .findAll(GlDropdownItem)
- .filter(item => !item.is('[disabled]'));
- const findPodsDropdownItemsSelected = () =>
- findPodsDropdownItems()
- .filter(item => {
- return !item.find(GlIcon).classes('invisible');
- })
- .at(0);
- const findSearchBox = () => wrapper.find({ ref: 'searchBox' });
+ const findFilteredSearch = () => wrapper.find(GlFilteredSearch);
const findTimeRangePicker = () => wrapper.find({ ref: 'dateTimePicker' });
+ const getSearchToken = type =>
+ findFilteredSearch()
+ .props('availableTokens')
+ .filter(token => token.type === type)[0];
const mockStateLoading = () => {
state.timeRange.selected = defaultTimeRange;
state.timeRange.current = convertToFixedRange(defaultTimeRange);
state.pods.options = [];
state.pods.current = null;
+ state.logs.isLoading = true;
};
const mockStateWithData = () => {
@@ -42,6 +36,7 @@ describe('LogAdvancedFilters', () => {
state.timeRange.current = convertToFixedRange(defaultTimeRange);
state.pods.options = mockPods;
state.pods.current = null;
+ state.logs.isLoading = false;
};
const initWrapper = (propsData = {}) => {
@@ -76,11 +71,18 @@ describe('LogAdvancedFilters', () => {
expect(wrapper.isVueInstance()).toBe(true);
expect(wrapper.isEmpty()).toBe(false);
- expect(findPodsDropdown().exists()).toBe(true);
- expect(findSearchBox().exists()).toBe(true);
+ expect(findFilteredSearch().exists()).toBe(true);
expect(findTimeRangePicker().exists()).toBe(true);
});
+ it('displays search tokens', () => {
+ expect(getSearchToken(TOKEN_TYPE_POD_NAME)).toMatchObject({
+ title: 'Pod name',
+ unique: true,
+ operators: [expect.objectContaining({ value: '=' })],
+ });
+ });
+
describe('disabled state', () => {
beforeEach(() => {
mockStateLoading();
@@ -90,9 +92,7 @@ describe('LogAdvancedFilters', () => {
});
it('displays disabled filters', () => {
- expect(findPodsDropdown().props('text')).toBe('All pods');
- expect(findPodsDropdown().attributes('disabled')).toBeTruthy();
- expect(findSearchBox().attributes('disabled')).toBeTruthy();
+ expect(findFilteredSearch().attributes('disabled')).toBeTruthy();
expect(findTimeRangePicker().attributes('disabled')).toBeTruthy();
});
});
@@ -103,16 +103,17 @@ describe('LogAdvancedFilters', () => {
initWrapper();
});
- it('displays a enabled filters', () => {
- expect(findPodsDropdown().props('text')).toBe('All pods');
- expect(findPodsDropdown().attributes('disabled')).toBeFalsy();
- expect(findSearchBox().attributes('disabled')).toBeFalsy();
+ it('displays a disabled search', () => {
+ expect(findFilteredSearch().attributes('disabled')).toBeTruthy();
+ });
+
+ it('displays an enabled date filter', () => {
expect(findTimeRangePicker().attributes('disabled')).toBeFalsy();
});
- it('displays an empty pods dropdown', () => {
- expect(findPodsNoPodsText().exists()).toBe(true);
- expect(findPodsDropdownItems()).toHaveLength(0);
+ it('displays no pod options when no pods are available, so suggestions can be displayed', () => {
+ expect(getSearchToken(TOKEN_TYPE_POD_NAME).options).toBe(null);
+ expect(getSearchToken(TOKEN_TYPE_POD_NAME).loading).toBe(true);
});
});
@@ -122,20 +123,24 @@ describe('LogAdvancedFilters', () => {
initWrapper();
});
- it('displays an enabled pods dropdown', () => {
- expect(findPodsDropdown().attributes('disabled')).toBeFalsy();
- expect(findPodsDropdown().props('text')).toBe('All pods');
+ it('displays a single token for pods', () => {
+ initWrapper();
+
+ const tokens = findFilteredSearch().props('availableTokens');
+
+ expect(tokens).toHaveLength(1);
+ expect(tokens[0].type).toBe(TOKEN_TYPE_POD_NAME);
});
- it('displays options in a pods dropdown', () => {
- const items = findPodsDropdownItems();
- expect(items).toHaveLength(mockPods.length + 1);
+ it('displays enabled filters', () => {
+ expect(findFilteredSearch().attributes('disabled')).toBeFalsy();
+ expect(findTimeRangePicker().attributes('disabled')).toBeFalsy();
});
- it('displays "all pods" selected in a pods dropdown', () => {
- const selected = findPodsDropdownItemsSelected();
+ it('displays options in the pods token', () => {
+ const { options } = getSearchToken(TOKEN_TYPE_POD_NAME);
- expect(selected.text()).toBe('All pods');
+ expect(options).toHaveLength(mockPods.length);
});
it('displays options in date time picker', () => {
@@ -146,30 +151,16 @@ describe('LogAdvancedFilters', () => {
});
describe('when the user interacts', () => {
- it('clicks on a all options, showPodLogs is dispatched with null', () => {
- const items = findPodsDropdownItems();
- items.at(0).vm.$emit('click');
-
- expect(dispatch).toHaveBeenCalledWith(`${module}/showPodLogs`, null);
- });
-
- it('clicks on a pod name, showPodLogs is dispatched with pod name', () => {
- const items = findPodsDropdownItems();
- const index = 2; // any pod
+ it('clicks on the search button, showFilteredLogs is dispatched', () => {
+ findFilteredSearch().vm.$emit('submit', null);
- items.at(index + 1).vm.$emit('click'); // skip "All pods" option
-
- expect(dispatch).toHaveBeenCalledWith(`${module}/showPodLogs`, mockPods[index]);
+ expect(dispatch).toHaveBeenCalledWith(`${module}/showFilteredLogs`, null);
});
- it('clicks on search, a serches is done', () => {
- expect(findSearchBox().attributes('disabled')).toBeFalsy();
-
- // input a query and click `search`
- findSearchBox().vm.$emit('input', mockSearch);
- findSearchBox().vm.$emit('submit');
+ it('clicks on the search button, showFilteredLogs is dispatched with the search', () => {
+ findFilteredSearch().vm.$emit('submit', [mockSearch]);
- expect(dispatch).toHaveBeenCalledWith(`${module}/setSearch`, mockSearch);
+ expect(dispatch).toHaveBeenCalledWith(`${module}/showFilteredLogs`, [mockSearch]);
});
it('selects a new time range', () => {
diff --git a/spec/frontend/logs/components/tokens/token_with_loading_state_spec.js b/spec/frontend/logs/components/tokens/token_with_loading_state_spec.js
new file mode 100644
index 00000000000..d98d7d05c92
--- /dev/null
+++ b/spec/frontend/logs/components/tokens/token_with_loading_state_spec.js
@@ -0,0 +1,68 @@
+import { GlFilteredSearchToken, GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+
+import TokenWithLoadingState from '~/logs/components/tokens/token_with_loading_state.vue';
+
+describe('TokenWithLoadingState', () => {
+ let wrapper;
+
+ const findFilteredSearchToken = () => wrapper.find(GlFilteredSearchToken);
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+
+ const initWrapper = (props = {}, options) => {
+ wrapper = shallowMount(TokenWithLoadingState, {
+ propsData: props,
+ ...options,
+ });
+ };
+
+ beforeEach(() => {});
+
+ it('passes entire config correctly', () => {
+ const config = {
+ icon: 'pod',
+ type: 'pod',
+ title: 'Pod name',
+ unique: true,
+ };
+
+ initWrapper({ config });
+
+ expect(findFilteredSearchToken().props('config')).toEqual(config);
+ });
+
+ describe('suggestions are replaced', () => {
+ let mockNoOptsText;
+ let config;
+ let stubs;
+
+ beforeEach(() => {
+ mockNoOptsText = 'No suggestions available';
+ config = {
+ loading: false,
+ noOptionsText: mockNoOptsText,
+ };
+ stubs = {
+ GlFilteredSearchToken: {
+ template: `<div><slot name="suggestions"></slot></div>`,
+ },
+ };
+ });
+
+ it('renders a loading icon', () => {
+ config.loading = true;
+
+ initWrapper({ config }, { stubs });
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(wrapper.text()).toBe('');
+ });
+
+ it('renders an empty results message', () => {
+ initWrapper({ config }, { stubs });
+
+ expect(findLoadingIcon().exists()).toBe(false);
+ expect(wrapper.text()).toBe(mockNoOptsText);
+ });
+ });
+});
diff --git a/spec/frontend/logs/stores/actions_spec.js b/spec/frontend/logs/stores/actions_spec.js
index 882673af984..6199c400e16 100644
--- a/spec/frontend/logs/stores/actions_spec.js
+++ b/spec/frontend/logs/stores/actions_spec.js
@@ -6,7 +6,7 @@ import { convertToFixedRange } from '~/lib/utils/datetime_range';
import logsPageState from '~/logs/stores/state';
import {
setInitData,
- setSearch,
+ showFilteredLogs,
showPodLogs,
fetchEnvironments,
fetchLogs,
@@ -31,6 +31,7 @@ import {
mockCursor,
mockNextCursor,
} from '../mock_data';
+import { TOKEN_TYPE_POD_NAME } from '~/logs/constants';
jest.mock('~/flash');
jest.mock('~/lib/utils/datetime_range');
@@ -93,13 +94,80 @@ describe('Logs Store actions', () => {
));
});
- describe('setSearch', () => {
- it('should commit search mutation', () =>
+ describe('showFilteredLogs', () => {
+ it('empty search should filter with defaults', () =>
testAction(
- setSearch,
- mockSearch,
+ showFilteredLogs,
+ undefined,
state,
- [{ type: types.SET_SEARCH, payload: mockSearch }],
+ [
+ { type: types.SET_CURRENT_POD_NAME, payload: null },
+ { type: types.SET_SEARCH, payload: '' },
+ ],
+ [{ type: 'fetchLogs' }],
+ ));
+
+ it('text search should filter with a search term', () =>
+ testAction(
+ showFilteredLogs,
+ [mockSearch],
+ state,
+ [
+ { type: types.SET_CURRENT_POD_NAME, payload: null },
+ { type: types.SET_SEARCH, payload: mockSearch },
+ ],
+ [{ type: 'fetchLogs' }],
+ ));
+
+ it('pod search should filter with a pod name', () =>
+ testAction(
+ showFilteredLogs,
+ [{ type: TOKEN_TYPE_POD_NAME, value: { data: mockPodName, operator: '=' } }],
+ state,
+ [
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.SET_SEARCH, payload: '' },
+ ],
+ [{ type: 'fetchLogs' }],
+ ));
+
+ it('pod search should filter with a pod selection and a search term', () =>
+ testAction(
+ showFilteredLogs,
+ [{ type: TOKEN_TYPE_POD_NAME, value: { data: mockPodName, operator: '=' } }, mockSearch],
+ state,
+ [
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.SET_SEARCH, payload: mockSearch },
+ ],
+ [{ type: 'fetchLogs' }],
+ ));
+
+ it('text search should filter with two search terms', () =>
+ testAction(
+ showFilteredLogs,
+ ['term1', 'term2'],
+ state,
+ [
+ { type: types.SET_CURRENT_POD_NAME, payload: null },
+ { type: types.SET_SEARCH, payload: `term1 term2` },
+ ],
+ [{ type: 'fetchLogs' }],
+ ));
+
+ it('pod search should filter with a pod selection and search terms before and after', () =>
+ testAction(
+ showFilteredLogs,
+ [
+ 'term1',
+ { type: TOKEN_TYPE_POD_NAME, value: { data: mockPodName, operator: '=' } },
+ 'term2',
+ ],
+ state,
+ [
+ { type: types.SET_CURRENT_POD_NAME, payload: mockPodName },
+ { type: types.SET_SEARCH, payload: `term1 term2` },
+ ],
[{ type: 'fetchLogs' }],
));
});
diff --git a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
index 88710b23bc9..d968b042ff1 100644
--- a/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
+++ b/spec/frontend/monitoring/components/__snapshots__/dashboard_template_spec.js.snap
@@ -92,7 +92,7 @@ exports[`Dashboard template matches the default snapshot 1`] = `
>
<gl-deprecated-button-stub
size="md"
- title="Reload this page"
+ title="Refresh dashboard"
variant="default"
>
<icon-stub
diff --git a/spec/frontend/monitoring/components/charts/time_series_spec.js b/spec/frontend/monitoring/components/charts/time_series_spec.js
index 3aad4c87237..870e47edde0 100644
--- a/spec/frontend/monitoring/components/charts/time_series_spec.js
+++ b/spec/frontend/monitoring/components/charts/time_series_spec.js
@@ -50,6 +50,7 @@ describe('Time series component', () => {
propsData: {
graphData: { ...graphData, type },
deploymentData: store.state.monitoringDashboard.deploymentData,
+ annotations: store.state.monitoringDashboard.annotations,
projectPath: `${mockHost}${mockProjectDir}`,
},
store,
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index b37c10791bf..c34a5afceb0 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -15,7 +15,8 @@ import {
receiveMetricsDashboardSuccess,
fetchDeploymentsData,
fetchEnvironmentsData,
- fetchPrometheusMetrics,
+ fetchDashboardData,
+ fetchAnnotations,
fetchPrometheusMetric,
setInitialState,
filterEnvironments,
@@ -24,10 +25,12 @@ import {
} from '~/monitoring/stores/actions';
import { gqClient, parseEnvironmentsResponse } from '~/monitoring/stores/utils';
import getEnvironments from '~/monitoring/queries/getEnvironments.query.graphql';
+import getAnnotations from '~/monitoring/queries/getAnnotations.query.graphql';
import storeState from '~/monitoring/stores/state';
import {
deploymentData,
environmentData,
+ annotationsData,
metricsDashboardResponse,
metricsDashboardViewModel,
dashboardGitResponse,
@@ -120,17 +123,15 @@ describe('Monitoring store actions', () => {
});
it('setting SET_ENVIRONMENTS_FILTER should dispatch fetchEnvironmentsData', () => {
- jest.spyOn(gqClient, 'mutate').mockReturnValue(
- Promise.resolve({
- data: {
- project: {
- data: {
- environments: [],
- },
+ jest.spyOn(gqClient, 'mutate').mockReturnValue({
+ data: {
+ project: {
+ data: {
+ environments: [],
},
},
- }),
- );
+ },
+ });
return testAction(
filterEnvironments,
@@ -180,17 +181,15 @@ describe('Monitoring store actions', () => {
});
it('dispatches receiveEnvironmentsDataSuccess on success', () => {
- jest.spyOn(gqClient, 'mutate').mockReturnValue(
- Promise.resolve({
- data: {
- project: {
- data: {
- environments: environmentData,
- },
+ jest.spyOn(gqClient, 'mutate').mockResolvedValue({
+ data: {
+ project: {
+ data: {
+ environments: environmentData,
},
},
- }),
- );
+ },
+ });
return testAction(
fetchEnvironmentsData,
@@ -208,7 +207,7 @@ describe('Monitoring store actions', () => {
});
it('dispatches receiveEnvironmentsDataFailure on error', () => {
- jest.spyOn(gqClient, 'mutate').mockReturnValue(Promise.reject());
+ jest.spyOn(gqClient, 'mutate').mockRejectedValue({});
return testAction(
fetchEnvironmentsData,
@@ -220,6 +219,80 @@ describe('Monitoring store actions', () => {
});
});
+ describe('fetchAnnotations', () => {
+ const { state } = store;
+ state.projectPath = 'gitlab-org/gitlab-test';
+ state.currentEnvironmentName = 'production';
+ state.currentDashboard = '.gitlab/dashboards/custom_dashboard.yml';
+
+ afterEach(() => {
+ resetStore(store);
+ });
+
+ it('fetches annotations data and dispatches receiveAnnotationsSuccess', () => {
+ const mockMutate = jest.spyOn(gqClient, 'mutate');
+ const mutationVariables = {
+ mutation: getAnnotations,
+ variables: {
+ projectPath: state.projectPath,
+ environmentName: state.currentEnvironmentName,
+ dashboardId: state.currentDashboard,
+ },
+ };
+
+ mockMutate.mockResolvedValue({
+ data: {
+ project: {
+ environment: {
+ metricDashboard: {
+ annotations: annotationsData,
+ },
+ },
+ },
+ },
+ });
+
+ return testAction(
+ fetchAnnotations,
+ null,
+ state,
+ [],
+ [
+ { type: 'requestAnnotations' },
+ { type: 'receiveAnnotationsSuccess', payload: annotationsData },
+ ],
+ () => {
+ expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
+ },
+ );
+ });
+
+ it('dispatches receiveAnnotationsFailure if the annotations API call fails', () => {
+ const mockMutate = jest.spyOn(gqClient, 'mutate');
+ const mutationVariables = {
+ mutation: getAnnotations,
+ variables: {
+ projectPath: state.projectPath,
+ environmentName: state.currentEnvironmentName,
+ dashboardId: state.currentDashboard,
+ },
+ };
+
+ mockMutate.mockRejectedValue({});
+
+ return testAction(
+ fetchAnnotations,
+ null,
+ state,
+ [],
+ [{ type: 'requestAnnotations' }, { type: 'receiveAnnotationsFailure' }],
+ () => {
+ expect(mockMutate).toHaveBeenCalledWith(mutationVariables);
+ },
+ );
+ });
+ });
+
describe('Set initial state', () => {
let mockedState;
beforeEach(() => {
@@ -375,7 +448,7 @@ describe('Monitoring store actions', () => {
metricsDashboardResponse.dashboard,
);
- expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetrics');
+ expect(dispatch).toHaveBeenCalledWith('fetchDashboardData');
});
it('sets the dashboards loaded from the repository', () => {
const params = {};
@@ -395,7 +468,7 @@ describe('Monitoring store actions', () => {
expect(commit).toHaveBeenCalledWith(types.SET_ALL_DASHBOARDS, dashboardGitResponse);
});
});
- describe('fetchPrometheusMetrics', () => {
+ describe('fetchDashboardData', () => {
let commit;
let dispatch;
let state;
@@ -413,7 +486,7 @@ describe('Monitoring store actions', () => {
const getters = {
metricsWithData: () => [],
};
- fetchPrometheusMetrics({ state, commit, dispatch, getters })
+ fetchDashboardData({ state, commit, dispatch, getters })
.then(() => {
expect(Tracking.event).toHaveBeenCalledWith(
document.body.dataset.page,
@@ -442,7 +515,7 @@ describe('Monitoring store actions', () => {
metricsWithData: () => [metric.id],
};
- fetchPrometheusMetrics({ state, commit, dispatch, getters })
+ fetchDashboardData({ state, commit, dispatch, getters })
.then(() => {
expect(dispatch).toHaveBeenCalledWith('fetchPrometheusMetric', {
metric,
@@ -478,7 +551,7 @@ describe('Monitoring store actions', () => {
dispatch.mockRejectedValueOnce(new Error('Error fetching this metric'));
dispatch.mockResolvedValue();
- fetchPrometheusMetrics({ state, commit, dispatch })
+ fetchDashboardData({ state, commit, dispatch })
.then(() => {
expect(dispatch).toHaveBeenCalledTimes(10); // one per metric plus 1 for deployments
expect(dispatch).toHaveBeenCalledWith('fetchDeploymentsData');
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 21a27a443af..34d224e13b0 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -202,15 +202,12 @@ describe('Monitoring mutations', () => {
mutations[types.REQUEST_METRIC_RESULT](stateCopy, {
metricId,
- result,
});
expect(stateCopy.showEmptyState).toBe(true);
expect(getMetric()).toEqual(
expect.objectContaining({
loading: true,
- result: null,
- state: metricStates.LOADING,
}),
);
});
@@ -232,7 +229,7 @@ describe('Monitoring mutations', () => {
});
it('adds results to the store', () => {
- expect(getMetric().result).toBe(undefined);
+ expect(getMetric().result).toBe(null);
mutations[types.RECEIVE_METRIC_RESULT_SUCCESS](stateCopy, {
metricId,
diff --git a/spec/frontend/monitoring/store/utils_spec.js b/spec/frontend/monitoring/store/utils_spec.js
index fcc5614850b..f46409e8e32 100644
--- a/spec/frontend/monitoring/store/utils_spec.js
+++ b/spec/frontend/monitoring/store/utils_spec.js
@@ -5,6 +5,7 @@ import {
removeLeadingSlash,
mapToDashboardViewModel,
} from '~/monitoring/stores/utils';
+import { NOT_IN_DB_PREFIX } from '~/monitoring/constants';
const projectPath = 'gitlab-org/gitlab-test';
@@ -256,6 +257,9 @@ describe('mapToDashboardViewModel', () => {
expect(getMappedMetric(dashboard)).toEqual({
label: expect.any(String),
metricId: expect.any(String),
+ loading: false,
+ result: null,
+ state: null,
});
});
@@ -307,7 +311,7 @@ describe('mapToDashboardViewModel', () => {
describe('uniqMetricsId', () => {
[
- { input: { id: 1 }, expected: 'NO_DB_1' },
+ { input: { id: 1 }, expected: `${NOT_IN_DB_PREFIX}_1` },
{ input: { metric_id: 2 }, expected: '2_undefined' },
{ input: { metric_id: 2, id: 21 }, expected: '2_21' },
{ input: { metric_id: 22, id: 1 }, expected: '22_1' },
diff --git a/spec/frontend/prometheus_metrics/custom_metrics_spec.js b/spec/frontend/prometheus_metrics/custom_metrics_spec.js
index 3396b3694c0..97b8f7bd913 100644
--- a/spec/frontend/prometheus_metrics/custom_metrics_spec.js
+++ b/spec/frontend/prometheus_metrics/custom_metrics_spec.js
@@ -1,5 +1,5 @@
import MockAdapter from 'axios-mock-adapter';
-import PrometheusMetrics from '~/prometheus_metrics/custom_metrics';
+import CustomMetrics from '~/prometheus_metrics/custom_metrics';
import axios from '~/lib/utils/axios_utils';
import PANEL_STATE from '~/prometheus_metrics/constants';
import metrics from './mock_data';
@@ -24,99 +24,99 @@ describe('PrometheusMetrics', () => {
});
describe('Custom Metrics', () => {
- let prometheusMetrics;
+ let customMetrics;
beforeEach(() => {
- prometheusMetrics = new PrometheusMetrics('.js-prometheus-metrics-monitoring');
+ customMetrics = new CustomMetrics('.js-prometheus-metrics-monitoring');
});
it('should initialize wrapper element refs on the class object', () => {
- expect(prometheusMetrics.$wrapperCustomMetrics).not.toBeNull();
- expect(prometheusMetrics.$monitoredCustomMetricsPanel).not.toBeNull();
- expect(prometheusMetrics.$monitoredCustomMetricsCount).not.toBeNull();
- expect(prometheusMetrics.$monitoredCustomMetricsLoading).not.toBeNull();
- expect(prometheusMetrics.$monitoredCustomMetricsEmpty).not.toBeNull();
- expect(prometheusMetrics.$monitoredCustomMetricsList).not.toBeNull();
- expect(prometheusMetrics.$newCustomMetricButton).not.toBeNull();
- expect(prometheusMetrics.$flashCustomMetricsContainer).not.toBeNull();
+ expect(customMetrics.$wrapperCustomMetrics).not.toBeNull();
+ expect(customMetrics.$monitoredCustomMetricsPanel).not.toBeNull();
+ expect(customMetrics.$monitoredCustomMetricsCount).not.toBeNull();
+ expect(customMetrics.$monitoredCustomMetricsLoading).not.toBeNull();
+ expect(customMetrics.$monitoredCustomMetricsEmpty).not.toBeNull();
+ expect(customMetrics.$monitoredCustomMetricsList).not.toBeNull();
+ expect(customMetrics.$newCustomMetricButton).not.toBeNull();
+ expect(customMetrics.$flashCustomMetricsContainer).not.toBeNull();
});
it('should contain api endpoints', () => {
- expect(prometheusMetrics.activeCustomMetricsEndpoint).toEqual(customMetricsEndpoint);
+ expect(customMetrics.activeCustomMetricsEndpoint).toEqual(customMetricsEndpoint);
});
it('should show loading state when called with `loading`', () => {
- prometheusMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.LOADING);
+ customMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.LOADING);
- expect(prometheusMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toEqual(false);
- expect(prometheusMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toEqual(false);
+ expect(customMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
expect(
- prometheusMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
+ customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
).toBeTruthy();
- expect(prometheusMetrics.$newCustomMetricButton.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
});
it('should show metrics list when called with `list`', () => {
- prometheusMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.LIST);
+ customMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.LIST);
- expect(prometheusMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toEqual(false);
+ expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toEqual(false);
expect(
- prometheusMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
+ customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
).toBeTruthy();
- expect(prometheusMetrics.$newCustomMetricButton.hasClass('hidden')).toEqual(false);
- expect(prometheusMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toEqual(false);
+ expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
});
it('should show empty state when called with `empty`', () => {
- prometheusMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.EMPTY);
+ customMetrics.showMonitoringCustomMetricsPanelState(PANEL_STATE.EMPTY);
- expect(prometheusMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toEqual(false);
- expect(prometheusMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toEqual(false);
+ expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
expect(
- prometheusMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
+ customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
).toBeTruthy();
- expect(prometheusMetrics.$newCustomMetricButton.hasClass('hidden')).toEqual(false);
- expect(prometheusMetrics.$newCustomMetricText.hasClass('hidden')).toEqual(false);
+ expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toEqual(false);
+ expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toEqual(false);
});
it('should show monitored metrics list', () => {
- prometheusMetrics.customMetrics = metrics;
- prometheusMetrics.populateCustomMetrics();
+ customMetrics.customMetrics = metrics;
+ customMetrics.populateCustomMetrics();
- const $metricsListLi = prometheusMetrics.$monitoredCustomMetricsList.find('li');
+ const $metricsListLi = customMetrics.$monitoredCustomMetricsList.find('li');
- expect(prometheusMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toEqual(false);
+ expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toEqual(false);
expect(
- prometheusMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
+ customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden'),
).toBeTruthy();
- expect(prometheusMetrics.$newCustomMetricButton.hasClass('hidden')).toEqual(false);
- expect(prometheusMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toEqual(false);
+ expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
expect($metricsListLi.length).toEqual(metrics.length);
});
it('should show the NO-INTEGRATION empty state', () => {
- prometheusMetrics.setNoIntegrationActiveState();
+ customMetrics.setNoIntegrationActiveState();
- expect(prometheusMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toEqual(false);
- expect(prometheusMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden')).toEqual(
+ expect(customMetrics.$monitoredCustomMetricsEmpty.hasClass('hidden')).toEqual(false);
+ expect(customMetrics.$monitoredCustomMetricsNoIntegrationText.hasClass('hidden')).toEqual(
false,
);
- expect(prometheusMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$newCustomMetricButton.hasClass('hidden')).toBeTruthy();
- expect(prometheusMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsLoading.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$monitoredCustomMetricsList.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$newCustomMetricButton.hasClass('hidden')).toBeTruthy();
+ expect(customMetrics.$newCustomMetricText.hasClass('hidden')).toBeTruthy();
});
});
});
diff --git a/spec/frontend/releases/components/app_edit_spec.js b/spec/frontend/releases/components/app_edit_spec.js
index bf66f5a5183..09bafe4aa9b 100644
--- a/spec/frontend/releases/components/app_edit_spec.js
+++ b/spec/frontend/releases/components/app_edit_spec.js
@@ -5,14 +5,16 @@ import { release as originalRelease } from '../mock_data';
import * as commonUtils from '~/lib/utils/common_utils';
import { BACK_URL_PARAM } from '~/releases/constants';
import AssetLinksForm from '~/releases/components/asset_links_form.vue';
+import { merge } from 'lodash';
describe('Release edit component', () => {
let wrapper;
let release;
let actions;
+ let getters;
let state;
- const factory = (featureFlags = {}) => {
+ const factory = ({ featureFlags = {}, store: storeUpdates = {} } = {}) => {
state = {
release,
markdownDocsPath: 'path/to/markdown/docs',
@@ -26,15 +28,30 @@ describe('Release edit component', () => {
addEmptyAssetLink: jest.fn(),
};
- const store = new Vuex.Store({
- modules: {
- detail: {
- namespaced: true,
- actions,
- state,
+ getters = {
+ isValid: () => true,
+ validationErrors: () => ({
+ assets: {
+ links: [],
},
- },
- });
+ }),
+ };
+
+ const store = new Vuex.Store(
+ merge(
+ {
+ modules: {
+ detail: {
+ namespaced: true,
+ actions,
+ state,
+ getters,
+ },
+ },
+ },
+ storeUpdates,
+ ),
+ );
wrapper = mount(ReleaseEditApp, {
store,
@@ -55,6 +72,8 @@ describe('Release edit component', () => {
wrapper = null;
});
+ const findSubmitButton = () => wrapper.find('button[type=submit]');
+
describe(`basic functionality tests: all tests unrelated to the "${BACK_URL_PARAM}" parameter`, () => {
beforeEach(() => {
factory();
@@ -101,7 +120,7 @@ describe('Release edit component', () => {
});
it('renders the "Save changes" button as type="submit"', () => {
- expect(wrapper.find('.js-submit-button').attributes('type')).toBe('submit');
+ expect(findSubmitButton().attributes('type')).toBe('submit');
});
it('calls updateRelease when the form is submitted', () => {
@@ -143,7 +162,7 @@ describe('Release edit component', () => {
describe('when the release_asset_link_editing feature flag is disabled', () => {
beforeEach(() => {
- factory({ releaseAssetLinkEditing: false });
+ factory({ featureFlags: { releaseAssetLinkEditing: false } });
});
it('does not render the asset links portion of the form', () => {
@@ -153,7 +172,7 @@ describe('Release edit component', () => {
describe('when the release_asset_link_editing feature flag is enabled', () => {
beforeEach(() => {
- factory({ releaseAssetLinkEditing: true });
+ factory({ featureFlags: { releaseAssetLinkEditing: true } });
});
it('renders the asset links portion of the form', () => {
@@ -161,4 +180,46 @@ describe('Release edit component', () => {
});
});
});
+
+ describe('validation', () => {
+ describe('when the form is valid', () => {
+ beforeEach(() => {
+ factory({
+ store: {
+ modules: {
+ detail: {
+ getters: {
+ isValid: () => true,
+ },
+ },
+ },
+ },
+ });
+ });
+
+ it('renders the submit button as enabled', () => {
+ expect(findSubmitButton().attributes('disabled')).toBeUndefined();
+ });
+ });
+
+ describe('when the form is invalid', () => {
+ beforeEach(() => {
+ factory({
+ store: {
+ modules: {
+ detail: {
+ getters: {
+ isValid: () => false,
+ },
+ },
+ },
+ },
+ });
+ });
+
+ it('renders the submit button as disabled', () => {
+ expect(findSubmitButton().attributes('disabled')).toBe('disabled');
+ });
+ });
+ });
});
diff --git a/spec/frontend/releases/components/asset_links_form_spec.js b/spec/frontend/releases/components/asset_links_form_spec.js
new file mode 100644
index 00000000000..44542868cfe
--- /dev/null
+++ b/spec/frontend/releases/components/asset_links_form_spec.js
@@ -0,0 +1,229 @@
+import Vuex from 'vuex';
+import { mount, createLocalVue } from '@vue/test-utils';
+import AssetLinksForm from '~/releases/components/asset_links_form.vue';
+import { release as originalRelease } from '../mock_data';
+import * as commonUtils from '~/lib/utils/common_utils';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Release edit component', () => {
+ let wrapper;
+ let release;
+ let actions;
+ let getters;
+ let state;
+
+ const factory = ({ release: overriddenRelease, linkErrors } = {}) => {
+ state = {
+ release: overriddenRelease || release,
+ releaseAssetsDocsPath: 'path/to/release/assets/docs',
+ };
+
+ actions = {
+ addEmptyAssetLink: jest.fn(),
+ updateAssetLinkUrl: jest.fn(),
+ updateAssetLinkName: jest.fn(),
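+      // stub removal by also dropping the link from the mocked state, so the rendered list updates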
+ removeAssetLink: jest.fn().mockImplementation((_context, linkId) => {
+ state.release.assets.links = state.release.assets.links.filter(l => l.id !== linkId);
+ }),
+ };
+
+ getters = {
+ validationErrors: () => ({
+ assets: {
+ links: linkErrors || {},
+ },
+ }),
+ };
+
+ const store = new Vuex.Store({
+ modules: {
+ detail: {
+ namespaced: true,
+ actions,
+ state,
+ getters,
+ },
+ },
+ });
+
+ wrapper = mount(AssetLinksForm, {
+ localVue,
+ store,
+ });
+ };
+
+ beforeEach(() => {
+ release = commonUtils.convertObjectPropsToCamelCase(originalRelease, { deep: true });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('with a basic store state', () => {
+ beforeEach(() => {
+ factory();
+ });
+
+ it('calls the "addEmptyAssetLink" store method when the "Add another link" button is clicked', () => {
+ expect(actions.addEmptyAssetLink).not.toHaveBeenCalled();
+
+ wrapper.find({ ref: 'addAnotherLinkButton' }).vm.$emit('click');
+
+ expect(actions.addEmptyAssetLink).toHaveBeenCalledTimes(1);
+ });
+
+ it('calls the "removeAssetLinks" store method when the remove button is clicked', () => {
+ expect(actions.removeAssetLink).not.toHaveBeenCalled();
+
+ wrapper.find('.remove-button').vm.$emit('click');
+
+ expect(actions.removeAssetLink).toHaveBeenCalledTimes(1);
+ });
+
+ it('calls the "updateAssetLinkUrl" store method when text is entered into the "URL" input field', () => {
+ const linkIdToUpdate = release.assets.links[0].id;
+ const newUrl = 'updated url';
+
+ expect(actions.updateAssetLinkUrl).not.toHaveBeenCalled();
+
+ wrapper.find({ ref: 'urlInput' }).vm.$emit('change', newUrl);
+
+ expect(actions.updateAssetLinkUrl).toHaveBeenCalledTimes(1);
+ expect(actions.updateAssetLinkUrl).toHaveBeenCalledWith(
+ expect.anything(),
+ {
+ linkIdToUpdate,
+ newUrl,
+ },
+ undefined,
+ );
+ });
+
+ it('calls the "updateAssetLinName" store method when text is entered into the "Link title" input field', () => {
+ const linkIdToUpdate = release.assets.links[0].id;
+ const newName = 'updated name';
+
+ expect(actions.updateAssetLinkName).not.toHaveBeenCalled();
+
+ wrapper.find({ ref: 'nameInput' }).vm.$emit('change', newName);
+
+ expect(actions.updateAssetLinkName).toHaveBeenCalledTimes(1);
+ expect(actions.updateAssetLinkName).toHaveBeenCalledWith(
+ expect.anything(),
+ {
+ linkIdToUpdate,
+ newName,
+ },
+ undefined,
+ );
+ });
+ });
+
+ describe('validation', () => {
+ let linkId;
+
+ beforeEach(() => {
+ linkId = release.assets.links[0].id;
+ });
+
+ const findUrlValidationMessage = () => wrapper.find('.url-field .invalid-feedback');
+ const findNameValidationMessage = () => wrapper.find('.link-title-field .invalid-feedback');
+
+ it('does not show any validation messages if there are no validation errors', () => {
+ factory();
+
+ expect(findUrlValidationMessage().exists()).toBe(false);
+ expect(findNameValidationMessage().exists()).toBe(false);
+ });
+
+ it('shows a validation error message when two links have the same URLs', () => {
+ factory({
+ linkErrors: {
+ [linkId]: { isDuplicate: true },
+ },
+ });
+
+ expect(findUrlValidationMessage().text()).toBe(
+ 'This URL is already used for another link; duplicate URLs are not allowed',
+ );
+ });
+
+ it('shows a validation error message when a URL has a bad format', () => {
+ factory({
+ linkErrors: {
+ [linkId]: { isBadFormat: true },
+ },
+ });
+
+ expect(findUrlValidationMessage().text()).toBe(
+ 'URL must start with http://, https://, or ftp://',
+ );
+ });
+
+ it('shows a validation error message when the URL is empty (and the title is not empty)', () => {
+ factory({
+ linkErrors: {
+ [linkId]: { isUrlEmpty: true },
+ },
+ });
+
+ expect(findUrlValidationMessage().text()).toBe('URL is required');
+ });
+
+ it('shows a validation error message when the title is empty (and the URL is not empty)', () => {
+ factory({
+ linkErrors: {
+ [linkId]: { isNameEmpty: true },
+ },
+ });
+
+ expect(findNameValidationMessage().text()).toBe('Link title is required');
+ });
+ });
+
+ describe('empty state', () => {
+ describe('when the release fetched from the API has no links', () => {
+ beforeEach(() => {
+ factory({
+ release: {
+ ...release,
+ assets: {
+ links: [],
+ },
+ },
+ });
+ });
+
+ it('calls the addEmptyAssetLink store method when the component is created', () => {
+ expect(actions.addEmptyAssetLink).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('when the release fetched from the API has one link', () => {
+ beforeEach(() => {
+ factory({
+ release: {
+ ...release,
+ assets: {
+ links: release.assets.links.slice(0, 1),
+ },
+ },
+ });
+ });
+
+ it('does not call the addEmptyAssetLink store method when the component is created', () => {
+ expect(actions.addEmptyAssetLink).not.toHaveBeenCalled();
+ });
+
+ it('calls addEmptyAssetLink when the final link is deleted by the user', () => {
+ wrapper.find('.remove-button').vm.$emit('click');
+
+ expect(actions.addEmptyAssetLink).toHaveBeenCalledTimes(1);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/releases/stores/modules/detail/getters_spec.js b/spec/frontend/releases/stores/modules/detail/getters_spec.js
index 7dc95c24055..8945ad97c93 100644
--- a/spec/frontend/releases/stores/modules/detail/getters_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/getters_spec.js
@@ -56,4 +56,158 @@ describe('Release detail getters', () => {
expect(getters.releaseLinksToDelete(state)).toEqual(originalLinks);
});
});
+
+ describe('validationErrors', () => {
+ describe('when the form is valid', () => {
+ it('returns no validation errors', () => {
+ const state = {
+ release: {
+ assets: {
+ links: [
+ { id: 1, url: 'https://example.com/valid', name: 'Link 1' },
+ { id: 2, url: '', name: '' },
+ { id: 3, url: '', name: ' ' },
+ { id: 4, url: ' ', name: '' },
+ { id: 5, url: ' ', name: ' ' },
+ ],
+ },
+ },
+ };
+
+ const expectedErrors = {
+ assets: {
+ links: {
+ 1: {},
+ 2: {},
+ 3: {},
+ 4: {},
+ 5: {},
+ },
+ },
+ };
+
+ expect(getters.validationErrors(state)).toEqual(expectedErrors);
+ });
+ });
+
+ describe('when the form is invalid', () => {
+ let actualErrors;
+
+ beforeEach(() => {
+ const state = {
+ release: {
+ assets: {
+ links: [
+ // Duplicate URLs
+ { id: 1, url: 'https://example.com/duplicate', name: 'Link 1' },
+ { id: 2, url: 'https://example.com/duplicate', name: 'Link 2' },
+
+ // the validation check ignores leading/trailing
+ // whitespace and is case-insensitive
+ { id: 3, url: ' \tHTTPS://EXAMPLE.COM/DUPLICATE\n\r\n ', name: 'Link 3' },
+
+ // Invalid URL format
+ { id: 4, url: 'invalid', name: 'Link 4' },
+
+ // Missing URL
+ { id: 5, url: '', name: 'Link 5' },
+ { id: 6, url: ' ', name: 'Link 6' },
+
+ // Missing title
+ { id: 7, url: 'https://example.com/valid/1', name: '' },
+ { id: 8, url: 'https://example.com/valid/2', name: ' ' },
+ ],
+ },
+ },
+ };
+
+ actualErrors = getters.validationErrors(state);
+ });
+
+      it('returns validation errors if links share a URL', () => {
+ const expectedErrors = {
+ assets: {
+ links: {
+ 1: { isDuplicate: true },
+ 2: { isDuplicate: true },
+ 3: { isDuplicate: true },
+ },
+ },
+ };
+
+ expect(actualErrors).toMatchObject(expectedErrors);
+ });
+
+ it('returns a validation error if the URL is in the wrong format', () => {
+ const expectedErrors = {
+ assets: {
+ links: {
+ 4: { isBadFormat: true },
+ },
+ },
+ };
+
+ expect(actualErrors).toMatchObject(expectedErrors);
+ });
+
+      it('returns a validation error if the URL is missing (and the title is populated)', () => {
+ const expectedErrors = {
+ assets: {
+ links: {
+ 6: { isUrlEmpty: true },
+ 5: { isUrlEmpty: true },
+ },
+ },
+ };
+
+ expect(actualErrors).toMatchObject(expectedErrors);
+ });
+
+      it('returns a validation error if the title is missing (and the URL is populated)', () => {
+ const expectedErrors = {
+ assets: {
+ links: {
+ 7: { isNameEmpty: true },
+ 8: { isNameEmpty: true },
+ },
+ },
+ };
+
+ expect(actualErrors).toMatchObject(expectedErrors);
+ });
+ });
+ });
+
+ describe('isValid', () => {
+ // the value of state is not actually used by this getter
+ const state = {};
+
+ it('returns true when the form is valid', () => {
+ const mockGetters = {
+ validationErrors: {
+ assets: {
+ links: {
+ 1: {},
+ },
+ },
+ },
+ };
+
+ expect(getters.isValid(state, mockGetters)).toBe(true);
+ });
+
+ it('returns false when the form is invalid', () => {
+ const mockGetters = {
+ validationErrors: {
+ assets: {
+ links: {
+ 1: { isNameEmpty: true },
+ },
+ },
+ },
+ };
+
+ expect(getters.isValid(state, mockGetters)).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/smart_interval_spec.js b/spec/frontend/smart_interval_spec.js
new file mode 100644
index 00000000000..b32ac99e4e4
--- /dev/null
+++ b/spec/frontend/smart_interval_spec.js
@@ -0,0 +1,197 @@
+import $ from 'jquery';
+import { assignIn } from 'lodash';
+import waitForPromises from 'helpers/wait_for_promises';
+import SmartInterval from '~/smart_interval';
+
+jest.useFakeTimers();
+
+let interval;
+
+describe('SmartInterval', () => {
+ const DEFAULT_MAX_INTERVAL = 100;
+ const DEFAULT_STARTING_INTERVAL = 5;
+ const DEFAULT_INCREMENT_FACTOR = 2;
+
+ function createDefaultSmartInterval(config) {
+ const defaultParams = {
+ callback: () => Promise.resolve(),
+ startingInterval: DEFAULT_STARTING_INTERVAL,
+ maxInterval: DEFAULT_MAX_INTERVAL,
+ incrementByFactorOf: DEFAULT_INCREMENT_FACTOR,
+ lazyStart: false,
+ immediateExecution: false,
+ hiddenInterval: null,
+ };
+
+ if (config) {
+ assignIn(defaultParams, config);
+ }
+
+ return new SmartInterval(defaultParams);
+ }
+
+ afterEach(() => {
+ interval.destroy();
+ });
+
+ describe('Increment Interval', () => {
+ it('should increment the interval delay', () => {
+ interval = createDefaultSmartInterval();
+
+ jest.runOnlyPendingTimers();
+
+ return waitForPromises().then(() => {
+ const intervalConfig = interval.cfg;
+ const iterationCount = 4;
+ const maxIntervalAfterIterations =
+ intervalConfig.startingInterval * intervalConfig.incrementByFactorOf ** iterationCount;
+ const currentInterval = interval.getCurrentInterval();
+
+ // Provide some flexibility for performance of testing environment
+ expect(currentInterval).toBeGreaterThan(intervalConfig.startingInterval);
+ expect(currentInterval).toBeLessThanOrEqual(maxIntervalAfterIterations);
+ });
+ });
+
+ it('should not increment past maxInterval', () => {
+ interval = createDefaultSmartInterval({ maxInterval: DEFAULT_STARTING_INTERVAL });
+
+ jest.runOnlyPendingTimers();
+
+ return waitForPromises().then(() => {
+ const currentInterval = interval.getCurrentInterval();
+
+ expect(currentInterval).toBe(interval.cfg.maxInterval);
+ });
+ });
+
+ it('does not increment while waiting for callback', () => {
+ interval = createDefaultSmartInterval({
+ callback: () => new Promise($.noop),
+ });
+
+ jest.runOnlyPendingTimers();
+
+ return waitForPromises().then(() => {
+ const oneInterval = interval.cfg.startingInterval * DEFAULT_INCREMENT_FACTOR;
+
+ expect(interval.getCurrentInterval()).toEqual(oneInterval);
+ });
+ });
+ });
+
+ describe('Public methods', () => {
+ beforeEach(() => {
+ interval = createDefaultSmartInterval();
+ });
+
+ it('should cancel an interval', () => {
+ jest.runOnlyPendingTimers();
+
+ interval.cancel();
+
+ return waitForPromises().then(() => {
+ const { intervalId } = interval.state;
+ const currentInterval = interval.getCurrentInterval();
+ const intervalLowerLimit = interval.cfg.startingInterval;
+
+ expect(intervalId).toBeUndefined();
+ expect(currentInterval).toBe(intervalLowerLimit);
+ });
+ });
+
+ it('should resume an interval', () => {
+ jest.runOnlyPendingTimers();
+
+ interval.cancel();
+
+ interval.resume();
+
+ return waitForPromises().then(() => {
+ const { intervalId } = interval.state;
+
+ expect(intervalId).toBeTruthy();
+ });
+ });
+ });
+
+ describe('DOM Events', () => {
+ beforeEach(() => {
+ // This ensures DOM and DOM events are initialized for these specs.
+ setFixtures('<div></div>');
+
+ interval = createDefaultSmartInterval();
+ });
+
+ it('should pause when page is not visible', () => {
+ jest.runOnlyPendingTimers();
+
+ return waitForPromises().then(() => {
+ expect(interval.state.intervalId).toBeTruthy();
+
+ // simulates triggering of visibilitychange event
+ interval.onVisibilityChange({ target: { visibilityState: 'hidden' } });
+
+ expect(interval.state.intervalId).toBeUndefined();
+ });
+ });
+
+ it('should change to the hidden interval when page is not visible', () => {
+ interval.destroy();
+
+ const HIDDEN_INTERVAL = 1500;
+ interval = createDefaultSmartInterval({ hiddenInterval: HIDDEN_INTERVAL });
+
+ jest.runOnlyPendingTimers();
+
+ return waitForPromises().then(() => {
+ expect(interval.state.intervalId).toBeTruthy();
+ expect(
+ interval.getCurrentInterval() >= DEFAULT_STARTING_INTERVAL &&
+ interval.getCurrentInterval() <= DEFAULT_MAX_INTERVAL,
+ ).toBeTruthy();
+
+ // simulates triggering of visibilitychange event
+ interval.onVisibilityChange({ target: { visibilityState: 'hidden' } });
+
+ expect(interval.state.intervalId).toBeTruthy();
+ expect(interval.getCurrentInterval()).toBe(HIDDEN_INTERVAL);
+ });
+ });
+
+  it('should resume when the page becomes visible at the previous interval', () => {
+ jest.runOnlyPendingTimers();
+
+ return waitForPromises().then(() => {
+ expect(interval.state.intervalId).toBeTruthy();
+
+ // simulates triggering of visibilitychange event
+ interval.onVisibilityChange({ target: { visibilityState: 'hidden' } });
+
+ expect(interval.state.intervalId).toBeUndefined();
+
+ // simulates triggering of visibilitychange event
+ interval.onVisibilityChange({ target: { visibilityState: 'visible' } });
+
+ expect(interval.state.intervalId).toBeTruthy();
+ });
+ });
+
+ it('should cancel on page unload', () => {
+ jest.runOnlyPendingTimers();
+
+ return waitForPromises().then(() => {
+ $(document).triggerHandler('beforeunload');
+
+ expect(interval.state.intervalId).toBeUndefined();
+ expect(interval.getCurrentInterval()).toBe(interval.cfg.startingInterval);
+ });
+ });
+
+ it('should execute callback before first interval', () => {
+ interval = createDefaultSmartInterval({ immediateExecution: true });
+
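+    // the flag is expected to be consumed (reset to false) once the immediate callback has run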
+ expect(interval.cfg.immediateExecution).toBeFalsy();
+ });
+ });
+});
diff --git a/spec/frontend/snippets/components/snippet_blob_view_spec.js b/spec/frontend/snippets/components/snippet_blob_view_spec.js
index c4f1dd0ca35..1f6038bc7f0 100644
--- a/spec/frontend/snippets/components/snippet_blob_view_spec.js
+++ b/spec/frontend/snippets/components/snippet_blob_view_spec.js
@@ -1,5 +1,4 @@
import { mount } from '@vue/test-utils';
-import { GlLoadingIcon } from '@gitlab/ui';
import SnippetBlobView from '~/snippets/components/snippet_blob_view.vue';
import BlobHeader from '~/blob/components/blob_header.vue';
import BlobEmbeddable from '~/blob/components/blob_embeddable.vue';
@@ -19,23 +18,15 @@ describe('Blob Embeddable', () => {
id: 'gid://foo.bar/snippet',
webUrl: 'https://foo.bar',
visibilityLevel: SNIPPET_VISIBILITY_PUBLIC,
+ blob: BlobMock,
};
const dataMock = {
- blob: BlobMock,
activeViewerType: SimpleViewerMock.type,
};
- function createComponent(
- props = {},
- data = dataMock,
- blobLoading = false,
- contentLoading = false,
- ) {
+ function createComponent(props = {}, data = dataMock, contentLoading = false) {
const $apollo = {
queries: {
- blob: {
- loading: blobLoading,
- },
blobContent: {
loading: contentLoading,
},
@@ -87,12 +78,6 @@ describe('Blob Embeddable', () => {
expect(wrapper.find(BlobEmbeddable).exists()).toBe(true);
});
- it('shows loading icon while blob data is in flight', () => {
- createComponent({}, dataMock, true);
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- expect(wrapper.find('.snippet-file-content').exists()).toBe(false);
- });
-
it('sets simple viewer correctly', () => {
createComponent();
expect(wrapper.find(SimpleViewer).exists()).toBe(true);
@@ -133,14 +118,14 @@ describe('Blob Embeddable', () => {
});
it('renders simple viewer by default if URL contains hash', () => {
- createComponent();
+ createComponent({}, {});
expect(wrapper.vm.activeViewerType).toBe(SimpleViewerMock.type);
expect(wrapper.find(SimpleViewer).exists()).toBe(true);
});
describe('switchViewer()', () => {
- it('by default switches to the passed viewer', () => {
+ it('switches to the passed viewer', () => {
createComponent();
wrapper.vm.switchViewer(RichViewerMock.type);
@@ -157,22 +142,6 @@ describe('Blob Embeddable', () => {
expect(wrapper.find(SimpleViewer).exists()).toBe(true);
});
});
-
- it('respects hash over richViewer in the blob when corresponding parameter is passed', () => {
- createComponent(
- {},
- {
- blob: BlobMock,
- },
- );
- expect(wrapper.vm.blob.richViewer).toEqual(expect.any(Object));
-
- wrapper.vm.switchViewer(RichViewerMock.type, true);
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.activeViewerType).toBe(SimpleViewerMock.type);
- expect(wrapper.find(SimpleViewer).exists()).toBe(true);
- });
- });
});
});
});
diff --git a/spec/frontend/static_site_editor/components/static_site_editor_spec.js b/spec/frontend/static_site_editor/components/static_site_editor_spec.js
index a40f8edbeb2..2c4fa0e061a 100644
--- a/spec/frontend/static_site_editor/components/static_site_editor_spec.js
+++ b/spec/frontend/static_site_editor/components/static_site_editor_spec.js
@@ -30,7 +30,6 @@ describe('StaticSiteEditor', () => {
store = new Vuex.Store({
state: createState(initialState),
getters: {
- isContentLoaded: () => false,
contentChanged: () => false,
...getters,
},
@@ -43,9 +42,11 @@ describe('StaticSiteEditor', () => {
};
const buildContentLoadedStore = ({ initialState, getters } = {}) => {
buildStore({
- initialState,
+ initialState: {
+ isContentLoaded: true,
+ ...initialState,
+ },
getters: {
- isContentLoaded: () => true,
...getters,
},
});
@@ -85,7 +86,7 @@ describe('StaticSiteEditor', () => {
const content = 'edit area content';
beforeEach(() => {
- buildStore({ initialState: { content }, getters: { isContentLoaded: () => true } });
+ buildContentLoadedStore({ initialState: { content } });
buildWrapper();
});
diff --git a/spec/frontend/static_site_editor/mock_data.js b/spec/frontend/static_site_editor/mock_data.js
index 9e1c14515e6..1993636ab12 100644
--- a/spec/frontend/static_site_editor/mock_data.js
+++ b/spec/frontend/static_site_editor/mock_data.js
@@ -34,3 +34,11 @@ export const savedContentMeta = {
};
export const submitChangesError = 'Could not save changes';
+export const commitMultipleResponse = {
+ short_id: 'ed899a2f4b5',
+ web_url: '/commit/ed899a2f4b5',
+};
+export const createMergeRequestResponse = {
+ iid: '123',
+ web_url: '/merge_requests/123',
+};
diff --git a/spec/frontend/static_site_editor/services/generate_branch_name_spec.js b/spec/frontend/static_site_editor/services/generate_branch_name_spec.js
new file mode 100644
index 00000000000..0624fc3b7b4
--- /dev/null
+++ b/spec/frontend/static_site_editor/services/generate_branch_name_spec.js
@@ -0,0 +1,22 @@
+import { DEFAULT_TARGET_BRANCH, BRANCH_SUFFIX_COUNT } from '~/static_site_editor/constants';
+import generateBranchName from '~/static_site_editor/services/generate_branch_name';
+
+import { username } from '../mock_data';
+
+describe('generateBranchName', () => {
+ const timestamp = 12345678901234;
+
+ beforeEach(() => {
+ jest.spyOn(Date, 'now').mockReturnValueOnce(timestamp);
+ });
+
+ it('generates a name that includes the username and target branch', () => {
+ expect(generateBranchName(username)).toMatch(`${username}-${DEFAULT_TARGET_BRANCH}`);
+ });
+
+ it(`adds the first ${BRANCH_SUFFIX_COUNT} numbers of the current timestamp`, () => {
+ expect(generateBranchName(username)).toMatch(
+ timestamp.toString().substring(BRANCH_SUFFIX_COUNT),
+ );
+ });
+});
diff --git a/spec/frontend/static_site_editor/services/submit_content_changes_spec.js b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
new file mode 100644
index 00000000000..9a0bd88b57d
--- /dev/null
+++ b/spec/frontend/static_site_editor/services/submit_content_changes_spec.js
@@ -0,0 +1,131 @@
+import Api from '~/api';
+import { convertObjectPropsToSnakeCase } from '~/lib/utils/common_utils';
+
+import {
+ DEFAULT_TARGET_BRANCH,
+ SUBMIT_CHANGES_BRANCH_ERROR,
+ SUBMIT_CHANGES_COMMIT_ERROR,
+ SUBMIT_CHANGES_MERGE_REQUEST_ERROR,
+} from '~/static_site_editor/constants';
+import generateBranchName from '~/static_site_editor/services/generate_branch_name';
+import submitContentChanges from '~/static_site_editor/services/submit_content_changes';
+
+import {
+ username,
+ projectId,
+ commitMultipleResponse,
+ createMergeRequestResponse,
+ sourcePath,
+ sourceContent as content,
+} from '../mock_data';
+
+jest.mock('~/static_site_editor/services/generate_branch_name');
+
+describe('submitContentChanges', () => {
+ const mergeRequestTitle = `Update ${sourcePath} file`;
+ const branch = 'branch-name';
+
+ beforeEach(() => {
+ jest.spyOn(Api, 'createBranch').mockResolvedValue();
+ jest.spyOn(Api, 'commitMultiple').mockResolvedValue({ data: commitMultipleResponse });
+ jest
+ .spyOn(Api, 'createProjectMergeRequest')
+ .mockResolvedValue({ data: createMergeRequestResponse });
+
+ generateBranchName.mockReturnValue(branch);
+ });
+
+ it('creates a branch named after the username and target branch', () => {
+ return submitContentChanges({ username, projectId }).then(() => {
+ expect(Api.createBranch).toHaveBeenCalledWith(projectId, {
+ ref: DEFAULT_TARGET_BRANCH,
+ branch,
+ });
+ });
+ });
+
+ it('notifies error when branch could not be created', () => {
+ Api.createBranch.mockRejectedValueOnce();
+
+    return expect(submitContentChanges({ username, projectId })).rejects.toThrow(
+ SUBMIT_CHANGES_BRANCH_ERROR,
+ );
+ });
+
+ it('commits the content changes to the branch when creating branch succeeds', () => {
+ return submitContentChanges({ username, projectId, sourcePath, content }).then(() => {
+ expect(Api.commitMultiple).toHaveBeenCalledWith(projectId, {
+ branch,
+ commit_message: mergeRequestTitle,
+ actions: [
+ {
+ action: 'update',
+ file_path: sourcePath,
+ content,
+ },
+ ],
+ });
+ });
+ });
+
+ it('notifies error when content could not be committed', () => {
+ Api.commitMultiple.mockRejectedValueOnce();
+
+    return expect(submitContentChanges({ username, projectId })).rejects.toThrow(
+ SUBMIT_CHANGES_COMMIT_ERROR,
+ );
+ });
+
+  it('creates a merge request when committing changes succeeds', () => {
+ return submitContentChanges({ username, projectId, sourcePath, content }).then(() => {
+ expect(Api.createProjectMergeRequest).toHaveBeenCalledWith(
+ projectId,
+ convertObjectPropsToSnakeCase({
+ title: mergeRequestTitle,
+ targetBranch: DEFAULT_TARGET_BRANCH,
+ sourceBranch: branch,
+ }),
+ );
+ });
+ });
+
+ it('notifies error when merge request could not be created', () => {
+ Api.createProjectMergeRequest.mockRejectedValueOnce();
+
+    return expect(submitContentChanges({ username, projectId })).rejects.toThrow(
+ SUBMIT_CHANGES_MERGE_REQUEST_ERROR,
+ );
+ });
+
+ describe('when changes are submitted successfully', () => {
+ let result;
+
+ beforeEach(() => {
+ return submitContentChanges({ username, projectId, sourcePath, content }).then(_result => {
+ result = _result;
+ });
+ });
+
+ it('returns the branch name', () => {
+ expect(result).toMatchObject({ branch: { label: branch } });
+ });
+
+ it('returns commit short id and web url', () => {
+ expect(result).toMatchObject({
+ commit: {
+ label: commitMultipleResponse.short_id,
+ url: commitMultipleResponse.web_url,
+ },
+ });
+ });
+
+ it('returns merge request iid and web url', () => {
+ expect(result).toMatchObject({
+ mergeRequest: {
+ label: createMergeRequestResponse.iid,
+ url: createMergeRequestResponse.web_url,
+ },
+ });
+ });
+ });
+});
diff --git a/spec/frontend/static_site_editor/store/getters_spec.js b/spec/frontend/static_site_editor/store/getters_spec.js
index 1b482db9366..5793e344784 100644
--- a/spec/frontend/static_site_editor/store/getters_spec.js
+++ b/spec/frontend/static_site_editor/store/getters_spec.js
@@ -1,18 +1,8 @@
import createState from '~/static_site_editor/store/state';
-import { isContentLoaded, contentChanged } from '~/static_site_editor/store/getters';
+import { contentChanged } from '~/static_site_editor/store/getters';
import { sourceContent as content } from '../mock_data';
describe('Static Site Editor Store getters', () => {
- describe('isContentLoaded', () => {
- it('returns true when originalContent is not empty', () => {
- expect(isContentLoaded(createState({ originalContent: content }))).toBe(true);
- });
-
- it('returns false when originalContent is empty', () => {
- expect(isContentLoaded(createState({ originalContent: '' }))).toBe(false);
- });
- });
-
describe('contentChanged', () => {
it('returns true when content and originalContent are different', () => {
const state = createState({ content, originalContent: 'something else' });
diff --git a/spec/frontend/static_site_editor/store/mutations_spec.js b/spec/frontend/static_site_editor/store/mutations_spec.js
index 1fd687eed4a..0b213c11a04 100644
--- a/spec/frontend/static_site_editor/store/mutations_spec.js
+++ b/spec/frontend/static_site_editor/store/mutations_spec.js
@@ -19,6 +19,7 @@ describe('Static Site Editor Store mutations', () => {
mutation | stateProperty | payload | expectedValue
${types.LOAD_CONTENT} | ${'isLoadingContent'} | ${undefined} | ${true}
${types.RECEIVE_CONTENT_SUCCESS} | ${'isLoadingContent'} | ${contentLoadedPayload} | ${false}
+ ${types.RECEIVE_CONTENT_SUCCESS} | ${'isContentLoaded'} | ${contentLoadedPayload} | ${true}
${types.RECEIVE_CONTENT_SUCCESS} | ${'title'} | ${contentLoadedPayload} | ${title}
${types.RECEIVE_CONTENT_SUCCESS} | ${'content'} | ${contentLoadedPayload} | ${content}
${types.RECEIVE_CONTENT_SUCCESS} | ${'originalContent'} | ${contentLoadedPayload} | ${content}
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_terraform_plan_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_terraform_plan_spec.js
new file mode 100644
index 00000000000..1951b56587a
--- /dev/null
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_terraform_plan_spec.js
@@ -0,0 +1,89 @@
+import { GlLoadingIcon, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import axios from '~/lib/utils/axios_utils';
+import MockAdapter from 'axios-mock-adapter';
+import MrWidgetTerraformPlan from '~/vue_merge_request_widget/components/mr_widget_terraform_plan.vue';
+
+const plan = {
+ create: 10,
+ update: 20,
+ delete: 30,
+ job_path: '/path/to/ci/logs',
+};
+
+describe('MrWidgetTerraformPlan', () => {
+ let mock;
+ let wrapper;
+
+ const propsData = { endpoint: '/path/to/terraform/report.json' };
+
+ const mockPollingApi = (response, body, header) => {
+ mock.onGet(propsData.endpoint).reply(response, body, header);
+ };
+
+ const mountWrapper = () => {
+ wrapper = shallowMount(MrWidgetTerraformPlan, { propsData });
+ return axios.waitForAll();
+ };
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ mock.restore();
+ });
+
+ describe('loading poll', () => {
+ beforeEach(() => {
+ mockPollingApi(200, { 'tfplan.json': plan }, {});
+
+ return mountWrapper().then(() => {
+ wrapper.setData({ loading: true });
+ return wrapper.vm.$nextTick();
+ });
+ });
+
+    it('displays loading icon when loading is true', () => {
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+
+ expect(wrapper.find(GlSprintf).exists()).toBe(false);
+
+ expect(wrapper.text()).not.toContain(
+ 'A terraform report was generated in your pipelines. Changes are unknown',
+ );
+ });
+ });
+
+ describe('successful poll', () => {
+ beforeEach(() => {
+ mockPollingApi(200, { 'tfplan.json': plan }, {});
+ return mountWrapper();
+ });
+
+    it('displays the content change text', () => {
+ expect(wrapper.find(GlSprintf).exists()).toBe(true);
+ });
+
+ it('renders button when url is found', () => {
+ expect(wrapper.find('a').text()).toContain('View full log');
+ });
+ });
+
+ describe('polling fails', () => {
+ beforeEach(() => {
+ mockPollingApi(500, null, {});
+ return mountWrapper();
+ });
+
+ it('does not display changes text when api fails', () => {
+ expect(wrapper.text()).toContain(
+ 'A terraform report was generated in your pipelines. Changes are unknown',
+ );
+
+ expect(wrapper.find('.js-terraform-report-link').exists()).toBe(false);
+ expect(wrapper.text()).not.toContain('View full log');
+ });
+ });
+});
diff --git a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
index ef95cb1b8f2..e022f68fdec 100644
--- a/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
+++ b/spec/frontend/vue_mr_widget/mr_widget_options_spec.js
@@ -273,25 +273,6 @@ describe('mrWidgetOptions', () => {
};
});
- it('should not tell service to check status if document is not visible', () => {
- Object.defineProperty(document, 'visibilityState', {
- value: 'hidden',
- configurable: true,
- });
- vm.checkStatus(cb);
-
- return vm.$nextTick().then(() => {
- expect(vm.service.checkStatus).not.toHaveBeenCalled();
- expect(vm.mr.setData).not.toHaveBeenCalled();
- expect(vm.handleNotification).not.toHaveBeenCalled();
- expect(isCbExecuted).toBeFalsy();
- Object.defineProperty(document, 'visibilityState', {
- value: 'visible',
- configurable: true,
- });
- });
- });
-
it('should tell service to check status if document is visible', () => {
vm.checkStatus(cb);
diff --git a/spec/frontend/vue_shared/components/form/__snapshots__/form_footer_actions_spec.js.snap b/spec/frontend/vue_shared/components/form/__snapshots__/form_footer_actions_spec.js.snap
new file mode 100644
index 00000000000..2189d6ac3cc
--- /dev/null
+++ b/spec/frontend/vue_shared/components/form/__snapshots__/form_footer_actions_spec.js.snap
@@ -0,0 +1,19 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Form Footer Actions renders content properly 1`] = `
+<footer
+ class="form-actions d-flex justify-content-between"
+>
+ <div>
+ Bar
+ </div>
+
+ <div>
+ Foo
+ </div>
+
+ <div>
+ Abrakadabra
+ </div>
+</footer>
+`;
diff --git a/spec/frontend/vue_shared/components/form/form_footer_actions_spec.js b/spec/frontend/vue_shared/components/form/form_footer_actions_spec.js
new file mode 100644
index 00000000000..30e16bd12da
--- /dev/null
+++ b/spec/frontend/vue_shared/components/form/form_footer_actions_spec.js
@@ -0,0 +1,29 @@
+import FormFooterActions from '~/vue_shared/components/form/form_footer_actions.vue';
+import { shallowMount } from '@vue/test-utils';
+
+describe('Form Footer Actions', () => {
+ let wrapper;
+
+ function createComponent(slots = {}) {
+ wrapper = shallowMount(FormFooterActions, {
+ slots,
+ });
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders content properly', () => {
+ const defaultSlot = 'Foo';
+ const prepend = 'Bar';
+ const append = 'Abrakadabra';
+ createComponent({
+ default: defaultSlot,
+ prepend,
+ append,
+ });
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb b/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
index 02c6409a9a6..b894dce3e17 100644
--- a/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
+++ b/spec/graphql/resolvers/merge_request_pipelines_resolver_spec.rb
@@ -14,7 +14,7 @@ describe Resolvers::MergeRequestPipelinesResolver do
sha: merge_request.diff_head_sha
)
end
- let_it_be(:other_project_pipeline) { create(:ci_pipeline, project: merge_request.source_project) }
+ let_it_be(:other_project_pipeline) { create(:ci_pipeline, project: merge_request.source_project, ref: 'other-ref') }
let_it_be(:other_pipeline) { create(:ci_pipeline) }
let(:current_user) { create(:user) }
diff --git a/spec/graphql/resolvers/projects/jira_imports_resolver_spec.rb b/spec/graphql/resolvers/projects/jira_imports_resolver_spec.rb
index 4d060d213ed..47889126531 100644
--- a/spec/graphql/resolvers/projects/jira_imports_resolver_spec.rb
+++ b/spec/graphql/resolvers/projects/jira_imports_resolver_spec.rb
@@ -18,22 +18,20 @@ describe Resolvers::Projects::JiraImportsResolver do
it_behaves_like 'no jira import access'
end
-
- context 'when user developer' do
- before do
- project.add_developer(user)
- end
-
- it_behaves_like 'no jira import access'
- end
end
context 'when user can read Jira import data' do
before do
- project.add_maintainer(user)
+ project.add_guest(user)
end
it_behaves_like 'no jira import data present'
+
+      it 'does not raise an access error' do
+ expect do
+ resolve_imports
+ end.not_to raise_error
+ end
end
end
@@ -58,19 +56,11 @@ describe Resolvers::Projects::JiraImportsResolver do
it_behaves_like 'no jira import access'
end
-
- context 'when user developer' do
- before do
- project.add_developer(user)
- end
-
- it_behaves_like 'no jira import access'
- end
end
context 'when user can access Jira imports' do
before do
- project.add_maintainer(user)
+ project.add_guest(user)
end
it 'returns Jira imports sorted ascending by created_at time' do
diff --git a/spec/javascripts/ide/components/repo_commit_section_spec.js b/spec/javascripts/ide/components/repo_commit_section_spec.js
deleted file mode 100644
index 0ba8c86a036..00000000000
--- a/spec/javascripts/ide/components/repo_commit_section_spec.js
+++ /dev/null
@@ -1,113 +0,0 @@
-import Vue from 'vue';
-import { createComponentWithStore } from 'spec/helpers/vue_mount_component_helper';
-import store from '~/ide/stores';
-import router from '~/ide/ide_router';
-import repoCommitSection from '~/ide/components/repo_commit_section.vue';
-import { file, resetStore } from '../helpers';
-
-describe('RepoCommitSection', () => {
- let vm;
-
- function createComponent() {
- const Component = Vue.extend(repoCommitSection);
-
- store.state.noChangesStateSvgPath = 'svg';
- store.state.committedStateSvgPath = 'commitsvg';
-
- vm = createComponentWithStore(Component, store);
-
- vm.$store.state.currentProjectId = 'abcproject';
- vm.$store.state.currentBranchId = 'master';
- vm.$store.state.projects.abcproject = {
- web_url: '',
- branches: {
- master: {
- workingReference: '1',
- },
- },
- };
-
- const files = [file('file1'), file('file2')].map(f =>
- Object.assign(f, {
- type: 'blob',
- content: 'orginal content',
- }),
- );
-
- vm.$store.state.rightPanelCollapsed = false;
- vm.$store.state.currentBranch = 'master';
- vm.$store.state.changedFiles = [];
- vm.$store.state.stagedFiles = [{ ...files[0] }, { ...files[1] }];
- vm.$store.state.stagedFiles.forEach(f =>
- Object.assign(f, {
- changed: true,
- content: 'testing',
- }),
- );
-
- files.forEach(f => {
- vm.$store.state.entries[f.path] = f;
- });
-
- return vm;
- }
-
- beforeEach(done => {
- spyOn(router, 'push');
-
- vm = createComponent();
-
- spyOn(vm, 'openPendingTab').and.callThrough();
-
- vm.$mount();
-
- Vue.nextTick(done);
- });
-
- afterEach(() => {
- vm.$destroy();
-
- resetStore(vm.$store);
- });
-
- describe('empty Stage', () => {
- it('renders no changes text', () => {
- resetStore(vm.$store);
- const Component = Vue.extend(repoCommitSection);
-
- store.state.noChangesStateSvgPath = 'nochangessvg';
- store.state.committedStateSvgPath = 'svg';
-
- vm.$destroy();
- vm = createComponentWithStore(Component, store).$mount();
-
- expect(vm.$el.querySelector('.js-empty-state').textContent.trim()).toContain('No changes');
- expect(vm.$el.querySelector('.js-empty-state img').getAttribute('src')).toBe('nochangessvg');
- });
- });
-
- it('renders a commit section', () => {
- const changedFileElements = [...vm.$el.querySelectorAll('.multi-file-commit-list > li')];
- const allFiles = vm.$store.state.changedFiles.concat(vm.$store.state.stagedFiles);
-
- expect(changedFileElements).toHaveLength(2);
-
- changedFileElements.forEach((changedFile, i) => {
- expect(changedFile.textContent.trim()).toContain(allFiles[i].path);
- });
- });
-
- describe('mounted', () => {
- it('opens last opened file', () => {
- expect(store.state.openFiles.length).toBe(1);
- expect(store.state.openFiles[0].pending).toBe(true);
- });
-
- it('calls openPendingTab', () => {
- expect(vm.openPendingTab).toHaveBeenCalledWith({
- file: vm.lastOpenedFile,
- keyPrefix: 'unstaged',
- });
- });
- });
-});
diff --git a/spec/javascripts/smart_interval_spec.js b/spec/javascripts/smart_interval_spec.js
deleted file mode 100644
index 0dc9ee9d79a..00000000000
--- a/spec/javascripts/smart_interval_spec.js
+++ /dev/null
@@ -1,234 +0,0 @@
-import $ from 'jquery';
-import { assignIn } from 'lodash';
-import waitForPromises from 'spec/helpers/wait_for_promises';
-import SmartInterval from '~/smart_interval';
-
-describe('SmartInterval', function() {
- const DEFAULT_MAX_INTERVAL = 100;
- const DEFAULT_STARTING_INTERVAL = 5;
- const DEFAULT_SHORT_TIMEOUT = 75;
- const DEFAULT_INCREMENT_FACTOR = 2;
-
- function createDefaultSmartInterval(config) {
- const defaultParams = {
- callback: () => Promise.resolve(),
- startingInterval: DEFAULT_STARTING_INTERVAL,
- maxInterval: DEFAULT_MAX_INTERVAL,
- incrementByFactorOf: DEFAULT_INCREMENT_FACTOR,
- lazyStart: false,
- immediateExecution: false,
- hiddenInterval: null,
- };
-
- if (config) {
- assignIn(defaultParams, config);
- }
-
- return new SmartInterval(defaultParams);
- }
-
- beforeEach(() => {
- jasmine.clock().install();
- });
-
- afterEach(() => {
- jasmine.clock().uninstall();
- });
-
- describe('Increment Interval', function() {
- it('should increment the interval delay', done => {
- const smartInterval = createDefaultSmartInterval();
-
- jasmine.clock().tick(DEFAULT_SHORT_TIMEOUT);
-
- waitForPromises()
- .then(() => {
- const intervalConfig = smartInterval.cfg;
- const iterationCount = 4;
- const maxIntervalAfterIterations =
- intervalConfig.startingInterval * intervalConfig.incrementByFactorOf ** iterationCount;
- const currentInterval = smartInterval.getCurrentInterval();
-
- // Provide some flexibility for performance of testing environment
- expect(currentInterval).toBeGreaterThan(intervalConfig.startingInterval);
- expect(currentInterval).toBeLessThanOrEqual(maxIntervalAfterIterations);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('should not increment past maxInterval', done => {
- const smartInterval = createDefaultSmartInterval({ maxInterval: DEFAULT_STARTING_INTERVAL });
-
- jasmine.clock().tick(DEFAULT_STARTING_INTERVAL);
- jasmine.clock().tick(DEFAULT_STARTING_INTERVAL * DEFAULT_INCREMENT_FACTOR);
-
- waitForPromises()
- .then(() => {
- const currentInterval = smartInterval.getCurrentInterval();
-
- expect(currentInterval).toBe(smartInterval.cfg.maxInterval);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('does not increment while waiting for callback', done => {
- const smartInterval = createDefaultSmartInterval({
- callback: () => new Promise($.noop),
- });
-
- jasmine.clock().tick(DEFAULT_SHORT_TIMEOUT);
-
- waitForPromises()
- .then(() => {
- const oneInterval = smartInterval.cfg.startingInterval * DEFAULT_INCREMENT_FACTOR;
-
- expect(smartInterval.getCurrentInterval()).toEqual(oneInterval);
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('Public methods', function() {
- beforeEach(function() {
- this.smartInterval = createDefaultSmartInterval();
- });
-
- it('should cancel an interval', function(done) {
- const interval = this.smartInterval;
-
- jasmine.clock().tick(DEFAULT_SHORT_TIMEOUT);
-
- interval.cancel();
-
- waitForPromises()
- .then(() => {
- const { intervalId } = interval.state;
- const currentInterval = interval.getCurrentInterval();
- const intervalLowerLimit = interval.cfg.startingInterval;
-
- expect(intervalId).toBeUndefined();
- expect(currentInterval).toBe(intervalLowerLimit);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('should resume an interval', function(done) {
- const interval = this.smartInterval;
-
- jasmine.clock().tick(DEFAULT_SHORT_TIMEOUT);
-
- interval.cancel();
-
- interval.resume();
-
- waitForPromises()
- .then(() => {
- const { intervalId } = interval.state;
-
- expect(intervalId).toBeTruthy();
- })
- .then(done)
- .catch(done.fail);
- });
- });
-
- describe('DOM Events', function() {
- beforeEach(function() {
- // This ensures DOM and DOM events are initialized for these specs.
- setFixtures('<div></div>');
-
- this.smartInterval = createDefaultSmartInterval();
- });
-
- it('should pause when page is not visible', function(done) {
- const interval = this.smartInterval;
-
- jasmine.clock().tick(DEFAULT_SHORT_TIMEOUT);
-
- waitForPromises()
- .then(() => {
- expect(interval.state.intervalId).toBeTruthy();
-
- // simulates triggering of visibilitychange event
- interval.handleVisibilityChange({ target: { visibilityState: 'hidden' } });
-
- expect(interval.state.intervalId).toBeUndefined();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('should change to the hidden interval when page is not visible', done => {
- const HIDDEN_INTERVAL = 1500;
- const interval = createDefaultSmartInterval({ hiddenInterval: HIDDEN_INTERVAL });
-
- jasmine.clock().tick(DEFAULT_SHORT_TIMEOUT);
-
- waitForPromises()
- .then(() => {
- expect(interval.state.intervalId).toBeTruthy();
- expect(
- interval.getCurrentInterval() >= DEFAULT_STARTING_INTERVAL &&
- interval.getCurrentInterval() <= DEFAULT_MAX_INTERVAL,
- ).toBeTruthy();
-
- // simulates triggering of visibilitychange event
- interval.handleVisibilityChange({ target: { visibilityState: 'hidden' } });
-
- expect(interval.state.intervalId).toBeTruthy();
- expect(interval.getCurrentInterval()).toBe(HIDDEN_INTERVAL);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('should resume when page is becomes visible at the previous interval', function(done) {
- const interval = this.smartInterval;
-
- jasmine.clock().tick(DEFAULT_SHORT_TIMEOUT);
-
- waitForPromises()
- .then(() => {
- expect(interval.state.intervalId).toBeTruthy();
-
- // simulates triggering of visibilitychange event
- interval.handleVisibilityChange({ target: { visibilityState: 'hidden' } });
-
- expect(interval.state.intervalId).toBeUndefined();
-
- // simulates triggering of visibilitychange event
- interval.handleVisibilityChange({ target: { visibilityState: 'visible' } });
-
- expect(interval.state.intervalId).toBeTruthy();
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('should cancel on page unload', function(done) {
- const interval = this.smartInterval;
-
- jasmine.clock().tick(DEFAULT_SHORT_TIMEOUT);
-
- waitForPromises()
- .then(() => {
- $(document).triggerHandler('beforeunload');
-
- expect(interval.state.intervalId).toBeUndefined();
- expect(interval.getCurrentInterval()).toBe(interval.cfg.startingInterval);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('should execute callback before first interval', function() {
- const interval = createDefaultSmartInterval({ immediateExecution: true });
-
- expect(interval.cfg.immediateExecution).toBeFalsy();
- });
- });
-});
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index ce60a19a7b3..a0a8767637e 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -30,7 +30,7 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'optional_scopes contains all non-default scopes' do
stub_container_registry_config(enabled: true)
- expect(subject.optional_scopes).to eq %i[read_user read_api read_repository write_repository read_registry sudo openid profile email]
+ expect(subject.optional_scopes).to eq %i[read_user read_api read_repository write_repository read_registry write_registry sudo openid profile email]
end
end
@@ -38,21 +38,21 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
it 'contains all non-default scopes' do
stub_container_registry_config(enabled: true)
- expect(subject.all_available_scopes).to eq %i[api read_user read_api read_repository write_repository read_registry sudo]
+ expect(subject.all_available_scopes).to eq %i[api read_user read_api read_repository write_repository read_registry write_registry sudo]
end
it 'contains for non-admin user all non-default scopes without ADMIN access' do
stub_container_registry_config(enabled: true)
user = create(:user, admin: false)
- expect(subject.available_scopes_for(user)).to eq %i[api read_user read_api read_repository write_repository read_registry]
+ expect(subject.available_scopes_for(user)).to eq %i[api read_user read_api read_repository write_repository read_registry write_registry]
end
it 'contains for admin user all non-default scopes with ADMIN access' do
stub_container_registry_config(enabled: true)
user = create(:user, admin: true)
- expect(subject.available_scopes_for(user)).to eq %i[api read_user read_api read_repository write_repository read_registry sudo]
+ expect(subject.available_scopes_for(user)).to eq %i[api read_user read_api read_repository write_repository read_registry write_registry sudo]
end
context 'registry_scopes' do
@@ -72,7 +72,7 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
end
it 'contains all registry related scopes' do
- expect(subject.registry_scopes).to eq %i[read_registry]
+ expect(subject.registry_scopes).to eq %i[read_registry write_registry]
end
end
end
@@ -401,6 +401,49 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'while using deploy tokens' do
let(:auth_failure) { Gitlab::Auth::Result.new(nil, nil) }
+ shared_examples 'registry token scope' do
+ it 'fails when login is not valid' do
+ expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip'))
+ .to eq(auth_failure)
+ end
+
+ it 'fails when token is not valid' do
+ expect(gl_auth.find_for_git_client(login, '123123', project: project, ip: 'ip'))
+ .to eq(auth_failure)
+ end
+
+ it 'fails if token is nil' do
+ expect(gl_auth.find_for_git_client(login, nil, project: nil, ip: 'ip'))
+ .to eq(auth_failure)
+ end
+
+ it 'fails if token is not related to project' do
+ expect(gl_auth.find_for_git_client(login, 'abcdef', project: nil, ip: 'ip'))
+ .to eq(auth_failure)
+ end
+
+ it 'fails if token has been revoked' do
+ deploy_token.revoke!
+
+ expect(deploy_token.revoked?).to be_truthy
+ expect(gl_auth.find_for_git_client('deploy-token', deploy_token.token, project: nil, ip: 'ip'))
+ .to eq(auth_failure)
+ end
+ end
+
+ shared_examples 'deploy token with disabled registry' do
+ context 'when registry disabled' do
+ before do
+ stub_container_registry_config(enabled: false)
+ end
+
+ it 'fails when login and token are valid' do
+ expect(gl_auth.find_for_git_client(login, deploy_token.token, project: nil, ip: 'ip'))
+ .to eq(auth_failure)
+ end
+ end
+ end
+
context 'when deploy token and user have the same username' do
let(:username) { 'normal_user' }
let(:user) { create(:user, username: username, password: 'my-secret') }
@@ -425,34 +468,33 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'and belong to the same project' do
let!(:read_registry) { create(:deploy_token, username: 'deployer', read_repository: false, projects: [project]) }
let!(:read_repository) { create(:deploy_token, username: read_registry.username, read_registry: false, projects: [project]) }
+ let(:auth_success) { Gitlab::Auth::Result.new(read_repository, project, :deploy_token, [:download_code]) }
it 'succeeds for the right token' do
- auth_success = Gitlab::Auth::Result.new(read_repository, project, :deploy_token, [:download_code])
-
expect(gl_auth.find_for_git_client('deployer', read_repository.token, project: project, ip: 'ip'))
.to eq(auth_success)
end
it 'fails for the wrong token' do
expect(gl_auth.find_for_git_client('deployer', read_registry.token, project: project, ip: 'ip'))
- .to eq(auth_failure)
+ .not_to eq(auth_success)
end
end
context 'and belong to different projects' do
+ let_it_be(:other_project) { create(:project) }
let!(:read_registry) { create(:deploy_token, username: 'deployer', read_repository: false, projects: [project]) }
- let!(:read_repository) { create(:deploy_token, username: read_registry.username, read_registry: false, projects: [project]) }
+ let!(:read_repository) { create(:deploy_token, username: read_registry.username, read_registry: false, projects: [other_project]) }
+ let(:auth_success) { Gitlab::Auth::Result.new(read_repository, other_project, :deploy_token, [:download_code]) }
it 'succeeds for the right token' do
- auth_success = Gitlab::Auth::Result.new(read_repository, project, :deploy_token, [:download_code])
-
- expect(gl_auth.find_for_git_client('deployer', read_repository.token, project: project, ip: 'ip'))
+ expect(gl_auth.find_for_git_client('deployer', read_repository.token, project: other_project, ip: 'ip'))
.to eq(auth_success)
end
it 'fails for the wrong token' do
- expect(gl_auth.find_for_git_client('deployer', read_registry.token, project: project, ip: 'ip'))
- .to eq(auth_failure)
+ expect(gl_auth.find_for_git_client('deployer', read_registry.token, project: other_project, ip: 'ip'))
+ .not_to eq(auth_success)
end
end
end
@@ -542,45 +584,32 @@ describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
.to eq(auth_success)
end
- it 'fails when login is not valid' do
- expect(gl_auth.find_for_git_client('random_login', deploy_token.token, project: project, ip: 'ip'))
- .to eq(auth_failure)
- end
+ it_behaves_like 'registry token scope'
+ end
- it 'fails when token is not valid' do
- expect(gl_auth.find_for_git_client(login, '123123', project: project, ip: 'ip'))
- .to eq(auth_failure)
- end
+ it_behaves_like 'deploy token with disabled registry'
+ end
- it 'fails if token is nil' do
- expect(gl_auth.find_for_git_client(login, nil, project: nil, ip: 'ip'))
- .to eq(auth_failure)
- end
+ context 'when the deploy token has write_registry as a scope' do
+ let_it_be(:deploy_token) { create(:deploy_token, write_registry: true, read_repository: false, read_registry: false, projects: [project]) }
+ let_it_be(:login) { deploy_token.username }
- it 'fails if token is not related to project' do
- expect(gl_auth.find_for_git_client(login, 'abcdef', project: nil, ip: 'ip'))
- .to eq(auth_failure)
+ context 'when registry enabled' do
+ before do
+ stub_container_registry_config(enabled: true)
end
- it 'fails if token has been revoked' do
- deploy_token.revoke!
-
- expect(deploy_token.revoked?).to be_truthy
- expect(gl_auth.find_for_git_client('deploy-token', deploy_token.token, project: nil, ip: 'ip'))
- .to eq(auth_failure)
- end
- end
+ it 'succeeds when login and a project token are valid' do
+ auth_success = Gitlab::Auth::Result.new(deploy_token, project, :deploy_token, [:create_container_image])
- context 'when registry disabled' do
- before do
- stub_container_registry_config(enabled: false)
+ expect(gl_auth.find_for_git_client(login, deploy_token.token, project: project, ip: 'ip'))
+ .to eq(auth_success)
end
- it 'fails when login and token are valid' do
- expect(gl_auth.find_for_git_client(login, deploy_token.token, project: nil, ip: 'ip'))
- .to eq(auth_failure)
- end
+ it_behaves_like 'registry token scope'
end
+
+ it_behaves_like 'deploy token with disabled registry'
end
end
end
diff --git a/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb b/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb
new file mode 100644
index 00000000000..fdabc8e8f7c
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_deployment_clusters_from_deployments_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::BackgroundMigration::BackfillDeploymentClustersFromDeployments, :migration, schema: 20200227140242 do
+ subject { described_class.new }
+
+ describe '#perform' do
+ it 'backfills deployment_cluster for all deployments in the given range with a non-null cluster_id' do
+ deployment_clusters = table(:deployment_clusters)
+
+ namespace = table(:namespaces).create(name: 'the-namespace', path: 'the-path')
+ project = table(:projects).create(name: 'the-project', namespace_id: namespace.id)
+ environment = table(:environments).create(name: 'the-environment', project_id: project.id, slug: 'slug')
+ cluster = table(:clusters).create(name: 'the-cluster')
+
+ deployment_data = { cluster_id: cluster.id, project_id: project.id, environment_id: environment.id, ref: 'abc', tag: false, sha: 'sha', status: 1 }
+ expected_deployment_1 = create_deployment(**deployment_data)
+ create_deployment(**deployment_data, cluster_id: nil) # no cluster_id
+ expected_deployment_2 = create_deployment(**deployment_data)
+ out_of_range_deployment = create_deployment(**deployment_data, cluster_id: cluster.id) # expected to be out of range
+
+ # to test "ON CONFLICT DO NOTHING"
+ existing_record_for_deployment_2 = deployment_clusters.create(
+ deployment_id: expected_deployment_2.id,
+ cluster_id: expected_deployment_2.cluster_id,
+ kubernetes_namespace: 'production'
+ )
+
+ subject.perform(expected_deployment_1.id, out_of_range_deployment.id - 1)
+
+ expect(deployment_clusters.all.pluck(:deployment_id, :cluster_id, :kubernetes_namespace)).to contain_exactly(
+ [expected_deployment_1.id, cluster.id, nil],
+ [expected_deployment_2.id, cluster.id, existing_record_for_deployment_2.kubernetes_namespace]
+ )
+ end
+
+ def create_deployment(**data)
+ @iid ||= 0
+ @iid += 1
+ table(:deployments).create(iid: @iid, **data)
+ end
+ end
+end
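For orientation, a minimal sketch of the kind of backfill the new spec above exercises, assuming a plain INSERT ... SELECT with ON CONFLICT DO NOTHING; this is inferred from the expectations and is not the actual Gitlab::BackgroundMigration::BackfillDeploymentClustersFromDeployments implementation:

# Illustrative only: copy every deployment in the id range that has a
# cluster_id into deployment_clusters, leaving pre-existing rows untouched.
class BackfillDeploymentClustersSketch
  def perform(start_id, end_id)
    ActiveRecord::Base.connection.execute(<<~SQL)
      INSERT INTO deployment_clusters (deployment_id, cluster_id)
      SELECT id, cluster_id
      FROM deployments
      WHERE cluster_id IS NOT NULL
        AND id BETWEEN #{Integer(start_id)} AND #{Integer(end_id)}
      ON CONFLICT DO NOTHING
    SQL
  end
end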
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 7842323d009..7be84b8f980 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -86,10 +86,6 @@ describe Gitlab::Database::BatchCount do
end
describe '#batch_distinct_count' do
- it 'counts with :id field' do
- expect(described_class.batch_distinct_count(model, :id)).to eq(5)
- end
-
it 'counts with column field' do
expect(described_class.batch_distinct_count(model, column)).to eq(2)
end
@@ -137,6 +133,12 @@ describe Gitlab::Database::BatchCount do
it 'returns fallback if batch size is less than min required' do
expect(described_class.batch_distinct_count(model, column, batch_size: small_batch_size)).to eq(fallback)
end
+
+ it 'raises an error if a distinct count with the :id column is requested' do
+ expect do
+ described_class.batch_distinct_count(model, :id)
+ end.to raise_error 'Use distinct count only with non id fields'
+ end
end
end
end
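The example added above expects batch_distinct_count to reject the primary key outright. A hedged sketch of such a guard, with the message taken from the spec and the body deliberately simplified (the real helper counts in batches):

def batch_distinct_count(relation, column = nil)
  raise 'Use distinct count only with non id fields' if column.to_s == 'id'

  # stand-in for the batched implementation
  relation.distinct.count(column)
end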
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 8b765ce122d..3db9320c021 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -2046,4 +2046,333 @@ describe Gitlab::Database::MigrationHelpers do
model.bulk_migrate_in(10.minutes, [%w(Class hello world)])
end
end
+
+ describe '#check_constraint_name' do
+ it 'returns a valid constraint name' do
+ name = model.check_constraint_name(:this_is_a_very_long_table_name,
+ :with_a_very_long_column_name,
+ :with_a_very_long_type)
+
+ expect(name).to be_an_instance_of(String)
+ expect(name).to start_with('check_')
+ expect(name.length).to eq(16)
+ end
+ end
+
+ describe '#check_constraint_exists?' do
+ before do
+ ActiveRecord::Base.connection.execute(
+ 'ALTER TABLE projects ADD CONSTRAINT check_1 CHECK (char_length(path) <= 5) NOT VALID'
+ )
+ end
+
+ after do
+ ActiveRecord::Base.connection.execute(
+ 'ALTER TABLE projects DROP CONSTRAINT IF EXISTS check_1'
+ )
+ end
+
+ it 'returns true if a constraint exists' do
+ expect(model.check_constraint_exists?(:projects, 'check_1'))
+ .to be_truthy
+ end
+
+ it 'returns false if a constraint does not exist' do
+ expect(model.check_constraint_exists?(:projects, 'this_does_not_exist'))
+ .to be_falsy
+ end
+
+ it 'returns false if a constraint with the same name exists in another table' do
+ expect(model.check_constraint_exists?(:users, 'check_1'))
+ .to be_falsy
+ end
+ end
+
+ describe '#add_check_constraint' do
+ before do
+ allow(model).to receive(:check_constraint_exists?).and_return(false)
+ end
+
+ context 'inside a transaction' do
+ it 'raises an error' do
+ expect(model).to receive(:transaction_open?).and_return(true)
+
+ expect do
+ model.add_check_constraint(
+ :test_table,
+ 'name IS NOT NULL',
+ 'check_name_not_null'
+ )
+ end.to raise_error(RuntimeError)
+ end
+ end
+
+ context 'outside a transaction' do
+ before do
+ allow(model).to receive(:transaction_open?).and_return(false)
+ end
+
+ context 'when the constraint is already defined in the database' do
+ it 'does not create a constraint' do
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, 'check_name_not_null')
+ .and_return(true)
+
+ expect(model).not_to receive(:execute).with(/ADD CONSTRAINT/)
+
+ # setting validate: false to only focus on the ADD CONSTRAINT command
+ model.add_check_constraint(
+ :test_table,
+ 'name IS NOT NULL',
+ 'check_name_not_null',
+ validate: false
+ )
+ end
+ end
+
+ context 'when the constraint is not defined in the database' do
+ it 'creates the constraint' do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
+
+ # setting validate: false to only focus on the ADD CONSTRAINT command
+ model.add_check_constraint(
+ :test_table,
+ 'char_length(name) <= 255',
+ 'check_name_not_null',
+ validate: false
+ )
+ end
+ end
+
+ context 'when validate is not provided' do
+ it 'performs validation' do
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, 'check_name_not_null')
+ .and_return(false).exactly(1)
+
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
+
+ # we need the check constraint to exist so that the validation proceeds
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, 'check_name_not_null')
+ .and_return(true).exactly(1)
+
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+
+ model.add_check_constraint(
+ :test_table,
+ 'char_length(name) <= 255',
+ 'check_name_not_null'
+ )
+ end
+ end
+
+ context 'when validate is provided with a falsey value' do
+ it 'skips validation' do
+ expect(model).not_to receive(:disable_statement_timeout)
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT/)
+ expect(model).not_to receive(:execute).with(/VALIDATE CONSTRAINT/)
+
+ model.add_check_constraint(
+ :test_table,
+ 'char_length(name) <= 255',
+ 'check_name_not_null',
+ validate: false
+ )
+ end
+ end
+
+ context 'when validate is provided with a truthy value' do
+ it 'performs validation' do
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, 'check_name_not_null')
+ .and_return(false).exactly(1)
+
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(/ADD CONSTRAINT check_name_not_null/)
+
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, 'check_name_not_null')
+ .and_return(true).exactly(1)
+
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+
+ model.add_check_constraint(
+ :test_table,
+ 'char_length(name) <= 255',
+ 'check_name_not_null',
+ validate: true
+ )
+ end
+ end
+ end
+ end
+
+ describe '#validate_check_constraint' do
+ context 'when the constraint does not exist' do
+ it 'raises an error' do
+ error_message = /Could not find check constraint "check_1" on table "test_table"/
+
+ expect(model).to receive(:check_constraint_exists?).and_return(false)
+
+ expect do
+ model.validate_check_constraint(:test_table, 'check_1')
+ end.to raise_error(RuntimeError, error_message)
+ end
+ end
+
+ context 'when the constraint exists' do
+ it 'performs validation' do
+ validate_sql = /ALTER TABLE test_table VALIDATE CONSTRAINT check_name/
+
+ expect(model).to receive(:check_constraint_exists?).and_return(true)
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:execute).with(/statement_timeout/)
+ expect(model).to receive(:execute).ordered.with(validate_sql)
+ expect(model).to receive(:execute).ordered.with(/RESET ALL/)
+
+ model.validate_check_constraint(:test_table, 'check_name')
+ end
+ end
+ end
+
+ describe '#remove_check_constraint' do
+ it 'removes the constraint' do
+ drop_sql = /ALTER TABLE test_table\s+DROP CONSTRAINT IF EXISTS check_name/
+
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:execute).with(drop_sql)
+
+ model.remove_check_constraint(:test_table, 'check_name')
+ end
+ end
+
+ describe '#add_text_limit' do
+ context 'when it is called with the default options' do
+ it 'calls add_check_constraint with an inferred constraint name and validate: true' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'max_length')
+ check = "char_length(name) <= 255"
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:add_check_constraint)
+ .with(:test_table, check, constraint_name, validate: true)
+
+ model.add_text_limit(:test_table, :name, 255)
+ end
+ end
+
+ context 'when all parameters are provided' do
+ it 'calls add_check_constraint with the correct parameters' do
+ constraint_name = 'check_name_limit'
+ check = "char_length(name) <= 255"
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:add_check_constraint)
+ .with(:test_table, check, constraint_name, validate: false)
+
+ model.add_text_limit(
+ :test_table,
+ :name,
+ 255,
+ constraint_name: constraint_name,
+ validate: false
+ )
+ end
+ end
+ end
+
+ describe '#validate_text_limit' do
+ context 'when constraint_name is not provided' do
+ it 'calls validate_check_constraint with an inferred constraint name' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'max_length')
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:validate_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.validate_text_limit(:test_table, :name)
+ end
+ end
+
+ context 'when constraint_name is provided' do
+ it 'calls validate_check_constraint with the correct parameters' do
+ constraint_name = 'check_name_limit'
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:validate_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.validate_text_limit(:test_table, :name, constraint_name: constraint_name)
+ end
+ end
+ end
+
+ describe '#remove_text_limit' do
+ context 'when constraint_name is not provided' do
+ it 'calls remove_check_constraint with an inferred constraint name' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'max_length')
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:remove_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.remove_text_limit(:test_table, :name)
+ end
+ end
+
+ context 'when constraint_name is provided' do
+ it 'calls remove_check_constraint with the correct parameters' do
+ constraint_name = 'check_name_limit'
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:remove_check_constraint)
+ .with(:test_table, constraint_name)
+
+ model.remove_text_limit(:test_table, :name, constraint_name: constraint_name)
+ end
+ end
+ end
+
+ describe '#check_text_limit_exists?' do
+ context 'when constraint_name is not provided' do
+ it 'calls check_constraint_exists? with an inferred constraint name' do
+ constraint_name = model.check_constraint_name(:test_table,
+ :name,
+ 'max_length')
+
+ expect(model).to receive(:check_constraint_name).and_call_original
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, constraint_name)
+
+ model.check_text_limit_exists?(:test_table, :name)
+ end
+ end
+
+ context 'when constraint_name is provided' do
+ it 'calls check_constraint_exists? with the correct parameters' do
+ constraint_name = 'check_name_limit'
+
+ expect(model).not_to receive(:check_constraint_name)
+ expect(model).to receive(:check_constraint_exists?)
+ .with(:test_table, constraint_name)
+
+ model.check_text_limit_exists?(:test_table, :name, constraint_name: constraint_name)
+ end
+ end
+ end
end
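As a usage note, the text-limit helpers covered above are meant to be called from a regular migration; the following sketch mirrors the calls in the spec, while the migration class, table, and column names are hypothetical:

class AddNameLimitToTestTable < ActiveRecord::Migration[6.0]
  include Gitlab::Database::MigrationHelpers

  disable_ddl_transaction!

  def up
    # adds a constraint equivalent to "char_length(name) <= 255" and, because
    # validate defaults to true, validates it against existing rows
    add_text_limit :test_table, :name, 255
  end

  def down
    remove_text_limit :test_table, :name
  end
end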
diff --git a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb
index 40b784fdb87..85e3bc14cdc 100644
--- a/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb
+++ b/spec/lib/gitlab/import_export/json/ndjson_reader_spec.rb
@@ -21,7 +21,7 @@ describe Gitlab::ImportExport::JSON::NdjsonReader do
describe '#exist?' do
subject { ndjson_reader.exist? }
- context 'given valid dir_path' do
+ context 'given valid dir_path', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/213843' do
let(:dir_path) { fixture }
it { is_expected.to be true }
diff --git a/spec/lib/gitlab/jira_import/labels_importer_spec.rb b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
index 2d0e2bc6b53..3eb4666a74f 100644
--- a/spec/lib/gitlab/jira_import/labels_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/labels_importer_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
describe Gitlab::JiraImport::LabelsImporter do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
- let_it_be(:jira_import) { create(:jira_import_state, project: project) }
let_it_be(:jira_service) { create(:jira_service, project: project) }
subject { described_class.new(project).execute }
@@ -15,29 +14,24 @@ describe Gitlab::JiraImport::LabelsImporter do
end
describe '#execute', :clean_gitlab_redis_cache do
- context 'when label creation failes' do
- before do
- allow_next_instance_of(Labels::CreateService) do |instance|
- allow(instance).to receive(:execute).and_return(nil)
- end
- end
+ context 'when label is missing from jira import' do
+ let_it_be(:no_label_jira_import) { create(:jira_import_state, label: nil, project: project) }
it 'raises error' do
- expect { subject }.to raise_error(Projects::ImportService::Error, 'Failed to create import label for jira import.')
+ expect { subject }.to raise_error(Projects::ImportService::Error, 'Failed to find import label for jira import.')
end
end
- context 'when label is created successfully' do
- it 'creates import label' do
- expect { subject }.to change { Label.count }.by(1)
- end
+ context 'when label exists' do
+ let_it_be(:label) { create(:label) }
+ let_it_be(:jira_import_with_label) { create(:jira_import_state, label: label, project: project) }
it 'caches import label' do
expect(Gitlab::Cache::Import::Caching.read(Gitlab::JiraImport.import_label_cache_key(project.id))).to be nil
subject
- expect(Gitlab::JiraImport.get_import_label_id(project.id).to_i).to be > 0
+ expect(Gitlab::JiraImport.get_import_label_id(project.id).to_i).to eq(label.id)
end
end
end
diff --git a/spec/lib/gitlab/legacy_github_import/client_spec.rb b/spec/lib/gitlab/legacy_github_import/client_spec.rb
index 8d1786ae49a..d266b39bd81 100644
--- a/spec/lib/gitlab/legacy_github_import/client_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/client_spec.rb
@@ -5,8 +5,9 @@ require 'spec_helper'
describe Gitlab::LegacyGithubImport::Client do
let(:token) { '123456' }
let(:github_provider) { Settingslogic.new('app_id' => 'asd123', 'app_secret' => 'asd123', 'name' => 'github', 'args' => { 'client_options' => {} }) }
+ let(:wait_for_rate_limit_reset) { true }
- subject(:client) { described_class.new(token) }
+ subject(:client) { described_class.new(token, wait_for_rate_limit_reset: wait_for_rate_limit_reset) }
before do
allow(Gitlab.config.omniauth).to receive(:providers).and_return([github_provider])
@@ -88,10 +89,23 @@ describe Gitlab::LegacyGithubImport::Client do
end
end
- it 'does not raise error when rate limit is disabled' do
- stub_request(:get, /api.github.com/)
- allow(client.api).to receive(:rate_limit!).and_raise(Octokit::NotFound)
+ context 'github rate limit' do
+ it 'does not raise error when rate limit is disabled' do
+ stub_request(:get, /api.github.com/)
+ allow(client.api).to receive(:rate_limit!).and_raise(Octokit::NotFound)
- expect { client.issues {} }.not_to raise_error
+ expect { client.repos }.not_to raise_error
+ end
+
+ context 'when wait for rate limit is disabled' do
+ let(:wait_for_rate_limit_reset) { false }
+
+ it 'raises the error limit error when requested' do
+ stub_request(:get, /api.github.com/)
+ allow(client.api).to receive(:repos).and_raise(Octokit::TooManyRequests)
+
+ expect { client.repos }.to raise_error(Octokit::TooManyRequests)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
index d957b1c992f..3cb02a8bcb3 100644
--- a/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
+++ b/spec/lib/gitlab/metrics/dashboard/processor_spec.rb
@@ -14,9 +14,11 @@ describe Gitlab::Metrics::Dashboard::Processor do
Gitlab::Metrics::Dashboard::Stages::CustomMetricsInserter,
Gitlab::Metrics::Dashboard::Stages::CustomMetricsDetailsInserter,
Gitlab::Metrics::Dashboard::Stages::EndpointInserter,
- Gitlab::Metrics::Dashboard::Stages::Sorter
+ Gitlab::Metrics::Dashboard::Stages::Sorter,
+ Gitlab::Metrics::Dashboard::Stages::AlertsInserter
]
end
+
let(:process_params) { [project, dashboard_yml, sequence, { environment: environment }] }
let(:dashboard) { described_class.new(*process_params).process }
@@ -113,6 +115,54 @@ describe Gitlab::Metrics::Dashboard::Processor do
end
end
+ context 'when the dashboard references persisted metrics with alerts' do
+ let!(:alert) do
+ create(
+ :prometheus_alert,
+ environment: environment,
+ project: project,
+ prometheus_metric: persisted_metric
+ )
+ end
+
+ shared_examples_for 'has saved alerts' do
+ it 'includes an alert path' do
+ target_metric = all_metrics.find { |metric| metric[:metric_id] == persisted_metric.id }
+
+ expect(target_metric).to be_a Hash
+ expect(target_metric).to include(:alert_path)
+ expect(target_metric[:alert_path]).to include(
+ project.path,
+ persisted_metric.id.to_s,
+ environment.id.to_s
+ )
+ end
+ end
+
+ context 'that are shared across projects' do
+ let!(:persisted_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
+
+ it_behaves_like 'has saved alerts'
+ end
+
+ context 'when the project has associated metrics' do
+ let!(:persisted_metric) { create(:prometheus_metric, project: project, group: :business) }
+
+ it_behaves_like 'has saved alerts'
+ end
+ end
+
+ context 'when there are no alerts' do
+ let!(:persisted_metric) { create(:prometheus_metric, :common, identifier: 'metric_a1') }
+
+ it 'does not insert an alert_path' do
+ target_metric = all_metrics.find { |metric| metric[:metric_id] == persisted_metric.id }
+
+ expect(target_metric).to be_a Hash
+ expect(target_metric).not_to include(:alert_path)
+ end
+ end
+
shared_examples_for 'errors with message' do |expected_message|
it 'raises a DashboardLayoutError' do
error_class = Gitlab::Metrics::Dashboard::Errors::DashboardProcessingError
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index 6221d6fb45f..b09194e7d0b 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -103,6 +103,12 @@ describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
expect(cache.expire(:foo)).to eq(1)
expect(cache.read(:foo)).to be_empty
end
+
+ it 'logs the failure' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception)
+
+ cache.expire(:foo)
+ end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
index e11613b202d..6e8a8c03aad 100644
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/duplicate_job_spec.rb
@@ -113,28 +113,22 @@ describe Gitlab::SidekiqMiddleware::DuplicateJobs::DuplicateJob, :clean_gitlab_r
end
describe 'droppable?' do
- where(:idempotent, :duplicate, :feature_enabled) do
- # [true, false].repeated_permutation(3)
- [[true, true, true],
- [true, true, false],
- [true, false, true],
- [true, false, false],
- [false, true, true],
- [false, true, false],
- [false, false, true],
- [false, false, false]]
+ where(:idempotent, :duplicate) do
+ # [true, false].repeated_permutation(2)
+ [[true, true],
+ [true, false],
+ [false, true],
+ [false, false]]
end
with_them do
before do
allow(AuthorizedProjectsWorker).to receive(:idempotent?).and_return(idempotent)
allow(duplicate_job).to receive(:duplicate?).and_return(duplicate)
- allow(Gitlab::SidekiqMiddleware::DuplicateJobs)
- .to receive(:drop_duplicates?).with(queue).and_return(feature_enabled)
end
it 'is droppable when all conditions are met' do
- if idempotent && duplicate && feature_enabled
+ if idempotent && duplicate
expect(duplicate_job).to be_droppable
else
expect(duplicate_job).not_to be_droppable
diff --git a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs_spec.rb b/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs_spec.rb
deleted file mode 100644
index fa5938f470b..00000000000
--- a/spec/lib/gitlab/sidekiq_middleware/duplicate_jobs_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-describe Gitlab::SidekiqMiddleware::DuplicateJobs do
- using RSpec::Parameterized::TableSyntax
-
- describe '.drop_duplicates?' do
- where(:global_feature_enabled, :selected_queue_enabled, :queue, :expected) do
- true | true | described_class::DROPPABLE_QUEUES.first | true
- true | true | "other_queue" | true
- true | false | described_class::DROPPABLE_QUEUES.first | true
- true | false | "other_queue" | true
- false | true | described_class::DROPPABLE_QUEUES.first | true
- false | true | "other_queue" | false
- false | false | described_class::DROPPABLE_QUEUES.first | false
- false | false | "other_queue" | false
- end
-
- with_them do
- before do
- stub_feature_flags(drop_duplicate_sidekiq_jobs: global_feature_enabled,
- drop_duplicate_sidekiq_jobs_for_queue: selected_queue_enabled)
- end
-
- it "allows dropping jobs when expected" do
- expect(described_class.drop_duplicates?(queue)).to be(expected)
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/static_site_editor/config_spec.rb b/spec/lib/gitlab/static_site_editor/config_spec.rb
new file mode 100644
index 00000000000..dea79fb0e92
--- /dev/null
+++ b/spec/lib/gitlab/static_site_editor/config_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Gitlab::StaticSiteEditor::Config do
+ subject(:config) { described_class.new(repository, ref, file_path, return_url) }
+
+ let(:project) { create(:project, :public, :repository, name: 'project', namespace: namespace) }
+ let(:namespace) { create(:namespace, name: 'namespace') }
+ let(:repository) { project.repository }
+ let(:ref) { 'master' }
+ let(:file_path) { 'README.md' }
+ let(:return_url) { 'http://example.com' }
+
+ describe '#payload' do
+ subject { config.payload }
+
+ it 'returns data for the frontend component' do
+ is_expected.to eq(
+ branch: 'master',
+ commit: repository.commit.id,
+ namespace: 'namespace',
+ path: 'README.md',
+ project: 'project',
+ project_id: project.id,
+ return_url: 'http://example.com'
+ )
+ end
+ end
+end
diff --git a/spec/mailers/emails/pages_domains_spec.rb b/spec/mailers/emails/pages_domains_spec.rb
index 78887cef7ab..5029a17e4e5 100644
--- a/spec/mailers/emails/pages_domains_spec.rb
+++ b/spec/mailers/emails/pages_domains_spec.rb
@@ -23,13 +23,20 @@ describe Emails::PagesDomains do
is_expected.to have_subject(email_subject)
is_expected.to have_body_text(project.human_name)
is_expected.to have_body_text(domain.domain)
- is_expected.to have_body_text domain.url
is_expected.to have_body_text project_pages_domain_url(project, domain)
- is_expected.to have_body_text help_page_url('user/project/pages/custom_domains_ssl_tls_certification/index.md', anchor: link_anchor)
end
end
end
+ shared_examples 'a pages domain verification email' do
+ it_behaves_like 'a pages domain email'
+
+ it 'has the expected content' do
+ is_expected.to have_body_text domain.url
+ is_expected.to have_body_text help_page_url('user/project/pages/custom_domains_ssl_tls_certification/index.md', anchor: link_anchor)
+ end
+ end
+
shared_examples 'notification about upcoming domain removal' do
context 'when domain is not scheduled for removal' do
it 'asks user to remove it' do
@@ -56,7 +63,7 @@ describe Emails::PagesDomains do
subject { Notify.pages_domain_enabled_email(domain, user) }
- it_behaves_like 'a pages domain email'
+ it_behaves_like 'a pages domain verification email'
it { is_expected.to have_body_text 'has been enabled' }
end
@@ -67,7 +74,7 @@ describe Emails::PagesDomains do
subject { Notify.pages_domain_disabled_email(domain, user) }
- it_behaves_like 'a pages domain email'
+ it_behaves_like 'a pages domain verification email'
it_behaves_like 'notification about upcoming domain removal'
@@ -80,7 +87,7 @@ describe Emails::PagesDomains do
subject { Notify.pages_domain_verification_succeeded_email(domain, user) }
- it_behaves_like 'a pages domain email'
+ it_behaves_like 'a pages domain verification email'
it { is_expected.to have_body_text 'successfully verified' }
end
@@ -94,10 +101,18 @@ describe Emails::PagesDomains do
it_behaves_like 'a pages domain email'
it_behaves_like 'notification about upcoming domain removal'
+ end
+
+ describe '#pages_domain_auto_ssl_failed_email' do
+ let(:email_subject) { "#{project.path} | ACTION REQUIRED: Something went wrong while obtaining the Let's Encrypt certificate for GitLab Pages domain '#{domain.domain}'" }
+
+ subject { Notify.pages_domain_auto_ssl_failed_email(domain, user) }
+
+ it_behaves_like 'a pages domain email'
- it 'says verification has failed and when the domain is enabled until' do
- is_expected.to have_body_text 'Verification has failed'
- is_expected.to have_body_text domain.enabled_until.strftime('%F %T')
+ it 'says that we failed to obtain the certificate' do
+ is_expected.to have_body_text "Something went wrong while obtaining the Let's Encrypt certificate."
+ is_expected.to have_body_text help_page_url('user/project/pages/custom_domains_ssl_tls_certification/lets_encrypt_integration.md', anchor: 'troubleshooting')
end
end
end
diff --git a/spec/migrations/20200406102120_backfill_deployment_clusters_from_deployments_spec.rb b/spec/migrations/20200406102120_backfill_deployment_clusters_from_deployments_spec.rb
new file mode 100644
index 00000000000..fcb253677e1
--- /dev/null
+++ b/spec/migrations/20200406102120_backfill_deployment_clusters_from_deployments_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20200406102120_backfill_deployment_clusters_from_deployments.rb')
+
+describe BackfillDeploymentClustersFromDeployments, :migration, :sidekiq, schema: 20200227140242 do
+ describe '#up' do
+ it 'schedules BackfillDeploymentClustersFromDeployments background jobs' do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
+
+ namespace = table(:namespaces).create(name: 'the-namespace', path: 'the-path')
+ project = table(:projects).create(name: 'the-project', namespace_id: namespace.id)
+ environment = table(:environments).create(name: 'the-environment', project_id: project.id, slug: 'slug')
+ cluster = table(:clusters).create(name: 'the-cluster')
+
+ deployment_data = { cluster_id: cluster.id, project_id: project.id, environment_id: environment.id, ref: 'abc', tag: false, sha: 'sha', status: 1 }
+
+ # batch 1
+ batch_1_begin = create_deployment(**deployment_data)
+ batch_1_end = create_deployment(**deployment_data)
+
+ # value that should not be included due to default scope
+ create_deployment(**deployment_data, cluster_id: nil)
+
+ # batch 2
+ batch_2_begin = create_deployment(**deployment_data)
+ batch_2_end = create_deployment(**deployment_data)
+
+ Sidekiq::Testing.fake! do
+ Timecop.freeze do
+ migrate!
+
+ # batch 1
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, batch_1_begin.id, batch_1_end.id)
+
+ # batch 2
+ expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, batch_2_begin.id, batch_2_end.id)
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(2)
+ end
+ end
+ end
+
+ def create_deployment(**data)
+ @iid ||= 0
+ @iid += 1
+ table(:deployments).create(iid: @iid, **data)
+ end
+ end
+end
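A hypothetical shape for the post-deployment migration this spec drives; the batching and two-minute spacing match the expectations above, but the model scaffolding here is only an assumption, not the shipped implementation:

class ScheduleBackfillSketch < ActiveRecord::Migration[6.0]
  MIGRATION = 'BackfillDeploymentClustersFromDeployments'
  BATCH_SIZE = 10_000

  class Deployment < ActiveRecord::Base
    self.table_name = 'deployments'
    # matches the "default scope" comment in the spec: rows without a cluster are never scheduled
    default_scope { where.not(cluster_id: nil) }
  end

  def up
    delay = 0

    Deployment.in_batches(of: BATCH_SIZE) do |batch|
      delay += 2.minutes
      ids = batch.pluck(:id)

      BackgroundMigrationWorker.perform_in(delay, MIGRATION, [ids.min, ids.max])
    end
  end

  def down
    # no-op: the backfill is additive
  end
end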
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 6f6ff3704b4..80b619ed2b1 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -349,16 +349,13 @@ describe Ci::JobArtifact do
end
describe 'file is being stored' do
- subject { create(:ci_job_artifact, :archive) }
-
context 'when object has nil store' do
- before do
- subject.update_column(:file_store, nil)
- subject.reload
- end
-
it 'is stored locally' do
- expect(subject.file_store).to be(nil)
+ subject = build(:ci_job_artifact, :archive, file_store: nil)
+
+ subject.save
+
+ expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
expect(subject.file).to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
end
@@ -366,6 +363,10 @@ describe Ci::JobArtifact do
context 'when existing object has local store' do
it 'is stored locally' do
+ subject = build(:ci_job_artifact, :archive)
+
+ subject.save
+
expect(subject.file_store).to be(ObjectStorage::Store::LOCAL)
expect(subject.file).to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::LOCAL)
@@ -379,6 +380,10 @@ describe Ci::JobArtifact do
context 'when file is stored' do
it 'is stored remotely' do
+ subject = build(:ci_job_artifact, :archive)
+
+ subject.save
+
expect(subject.file_store).to eq(ObjectStorage::Store::REMOTE)
expect(subject.file).not_to be_file_storage
expect(subject.file.object_store).to eq(ObjectStorage::Store::REMOTE)
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index 844e50dbb58..90412136c1d 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -2367,18 +2367,31 @@ describe Ci::Pipeline, :mailer do
end
end
- describe "#all_merge_requests" do
+ describe '#all_merge_requests' do
let(:project) { create(:project) }
shared_examples 'a method that returns all merge requests for a given pipeline' do
let(:pipeline) { create(:ci_empty_pipeline, status: 'created', project: pipeline_project, ref: 'master') }
- it "returns all merge requests having the same source branch" do
+ it 'returns all merge requests having the same source branch and the pipeline sha' do
merge_request = create(:merge_request, source_project: pipeline_project, target_project: project, source_branch: pipeline.ref)
+ create(:merge_request_diff, merge_request: merge_request).tap do |diff|
+ create(:merge_request_diff_commit, merge_request_diff: diff, sha: pipeline.sha)
+ end
+
expect(pipeline.all_merge_requests).to eq([merge_request])
end
+ it "doesn't return merge requests having the same source branch without the pipeline sha" do
+ merge_request = create(:merge_request, source_project: pipeline_project, target_project: project, source_branch: pipeline.ref)
+ create(:merge_request_diff, merge_request: merge_request).tap do |diff|
+ create(:merge_request_diff_commit, merge_request_diff: diff, sha: 'unrelated')
+ end
+
+ expect(pipeline.all_merge_requests).to be_empty
+ end
+
it "doesn't return merge requests having a different source branch" do
create(:merge_request, source_project: pipeline_project, target_project: project, source_branch: 'feature', target_branch: 'master')
diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb
index 568699cf3f6..a2d4c046d46 100644
--- a/spec/models/deploy_token_spec.rb
+++ b/spec/models/deploy_token_spec.rb
@@ -62,7 +62,7 @@ describe DeployToken do
context 'with no scopes' do
it 'is invalid' do
- deploy_token = build(:deploy_token, read_repository: false, read_registry: false)
+ deploy_token = build(:deploy_token, read_repository: false, read_registry: false, write_registry: false)
expect(deploy_token).not_to be_valid
expect(deploy_token.errors[:base].first).to eq("Scopes can't be blank")
@@ -79,7 +79,7 @@ describe DeployToken do
context 'with only one scope' do
it 'returns scopes assigned to DeployToken' do
- deploy_token = create(:deploy_token, read_registry: false)
+ deploy_token = create(:deploy_token, read_registry: false, write_registry: false)
expect(deploy_token.scopes).to eq([:read_repository])
end
end
diff --git a/spec/models/diff_note_position_spec.rb b/spec/models/diff_note_position_spec.rb
new file mode 100644
index 00000000000..a00ba35feef
--- /dev/null
+++ b/spec/models/diff_note_position_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe DiffNotePosition, type: :model do
+ it 'has a position attribute' do
+ diff_position = build(:diff_position)
+ line_code = 'bd4b7bfff3a247ccf6e3371c41ec018a55230bcc_534_521'
+ diff_note_position = build(:diff_note_position, line_code: line_code, position: diff_position)
+
+ expect(diff_note_position.position).to eq(diff_position)
+ expect(diff_note_position.line_code).to eq(line_code)
+ expect(diff_note_position.diff_content_type).to eq('text')
+ end
+
+ it 'is unique by note_id and diff type' do
+ existing_diff_note_position = create(:diff_note_position)
+ diff_note_position = build(:diff_note_position, note: existing_diff_note_position.note)
+
+ expect { diff_note_position.save! }.to raise_error(ActiveRecord::RecordNotUnique)
+ end
+end
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index 4bf56e7b28b..fa2648979e9 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -536,6 +536,24 @@ describe PagesDomain do
'user_provided', 'gitlab_provided')
end
+ describe '#save' do
+ context 'when we failed to obtain ssl certificate' do
+ let(:domain) { create(:pages_domain, auto_ssl_enabled: true, auto_ssl_failed: true) }
+
+ it 'clears failure if auto ssl is disabled' do
+ expect do
+ domain.update!(auto_ssl_enabled: false)
+ end.to change { domain.auto_ssl_failed }.from(true).to(false)
+ end
+
+ it 'does not clear failure on unrelated updates' do
+ expect do
+ domain.update!(verified_at: Time.now)
+ end.not_to change { domain.auto_ssl_failed }.from(true)
+ end
+ end
+ end
+
describe '.for_removal' do
subject { described_class.for_removal }
diff --git a/spec/models/project_services/chat_message/pipeline_message_spec.rb b/spec/models/project_services/chat_message/pipeline_message_spec.rb
index 4210b52a8b9..e99148d1d1f 100644
--- a/spec/models/project_services/chat_message/pipeline_message_spec.rb
+++ b/spec/models/project_services/chat_message/pipeline_message_spec.rb
@@ -431,6 +431,57 @@ describe ChatMessage::PipelineMessage do
end
end
+ context "when jobs succeed on retries" do
+ before do
+ args[:builds] = [
+ { id: 1, name: "job-1", status: "failed", stage: "stage-1" },
+ { id: 2, name: "job-2", status: "failed", stage: "stage-2" },
+ { id: 3, name: "job-3", status: "failed", stage: "stage-3" },
+ { id: 7, name: "job-1", status: "failed", stage: "stage-1" },
+ { id: 8, name: "job-1", status: "success", stage: "stage-1" }
+ ]
+ end
+
+ it "do not return a job which succeeded on retry" do
+ expected_jobs = [
+ "<http://example.gitlab.com/-/jobs/3|job-3>",
+ "<http://example.gitlab.com/-/jobs/2|job-2>"
+ ]
+
+ expect(subject.attachments.first[:fields][3]).to eq(
+ title: "Failed jobs",
+ value: expected_jobs.join(", "),
+ short: true
+ )
+ end
+ end
+
+ context "when jobs failed even on retries" do
+ before do
+ args[:builds] = [
+ { id: 1, name: "job-1", status: "failed", stage: "stage-1" },
+ { id: 2, name: "job-2", status: "failed", stage: "stage-2" },
+ { id: 3, name: "job-3", status: "failed", stage: "stage-3" },
+ { id: 7, name: "job-1", status: "failed", stage: "stage-1" },
+ { id: 8, name: "job-1", status: "failed", stage: "stage-1" }
+ ]
+ end
+
+ it "returns only first instance of the failed job" do
+ expected_jobs = [
+ "<http://example.gitlab.com/-/jobs/3|job-3>",
+ "<http://example.gitlab.com/-/jobs/2|job-2>",
+ "<http://example.gitlab.com/-/jobs/1|job-1>"
+ ]
+
+ expect(subject.attachments.first[:fields][3]).to eq(
+ title: "Failed jobs",
+ value: expected_jobs.join(", "),
+ short: true
+ )
+ end
+ end
+
context "when the CI config file contains a YAML error" do
let(:has_yaml_errors) { true }
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index 415d634d405..5565d30d8c1 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -133,7 +133,7 @@ describe PrometheusService, :use_clean_rails_memory_store_caching do
it 'creates default alerts' do
expect(Prometheus::CreateDefaultAlertsWorker)
.to receive(:perform_async)
- .with(project_id: project.id)
+ .with(project.id)
create_service
end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 5a9ca9f7b7e..13f1bcb389a 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -655,4 +655,26 @@ describe GroupPolicy do
end
end
end
+
+ it_behaves_like 'model with wiki policies' do
+ let(:container) { create(:group) }
+
+ def set_access_level(access_level)
+ allow(container).to receive(:wiki_access_level).and_return(access_level)
+ end
+
+ before do
+ stub_feature_flags(group_wiki: true)
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(group_wiki: false)
+ end
+
+ it 'does not include the wiki permissions' do
+ expect_disallowed(*permissions)
+ end
+ end
+ end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index d098369e124..db643e3a31f 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -121,147 +121,11 @@ describe ProjectPolicy do
expect(Ability).not_to be_allowed(user, :read_issue, project)
end
- context 'wiki feature' do
- let(:permissions) { %i(read_wiki create_wiki update_wiki admin_wiki download_wiki_code) }
+ it_behaves_like 'model with wiki policies' do
+ let(:container) { project }
- subject { described_class.new(owner, project) }
-
- context 'when the feature is disabled' do
- before do
- project.project_feature.update_attribute(:wiki_access_level, ProjectFeature::DISABLED)
- end
-
- it 'does not include the wiki permissions' do
- expect_disallowed(*permissions)
- end
-
- context 'when there is an external wiki' do
- it 'does not include the wiki permissions' do
- allow(project).to receive(:has_external_wiki?).and_return(true)
-
- expect_disallowed(*permissions)
- end
- end
- end
-
- describe 'read_wiki' do
- subject { described_class.new(user, project) }
-
- member_roles = %i[guest developer]
- stranger_roles = %i[anonymous non_member]
-
- user_roles = stranger_roles + member_roles
-
- # When a user is anonymous, their `current_user == nil`
- let(:user) { create(:user) unless user_role == :anonymous }
-
- before do
- project.visibility = project_visibility
- project.project_feature.update_attribute(:wiki_access_level, wiki_access_level)
- project.add_user(user, user_role) if member_roles.include?(user_role)
- end
-
- title = ->(project_visibility, wiki_access_level, user_role) do
- [
- "project is #{Gitlab::VisibilityLevel.level_name project_visibility}",
- "wiki is #{ProjectFeature.str_from_access_level wiki_access_level}",
- "user is #{user_role}"
- ].join(', ')
- end
-
- describe 'Situations where :read_wiki is always false' do
- where(case_names: title,
- project_visibility: Gitlab::VisibilityLevel.options.values,
- wiki_access_level: [ProjectFeature::DISABLED],
- user_role: user_roles)
-
- with_them do
- it { is_expected.to be_disallowed(:read_wiki) }
- end
- end
-
- describe 'Situations where :read_wiki is always true' do
- where(case_names: title,
- project_visibility: [Gitlab::VisibilityLevel::PUBLIC],
- wiki_access_level: [ProjectFeature::ENABLED],
- user_role: user_roles)
-
- with_them do
- it { is_expected.to be_allowed(:read_wiki) }
- end
- end
-
- describe 'Situations where :read_wiki requires project membership' do
- context 'the wiki is private, and the user is a member' do
- where(case_names: title,
- project_visibility: [Gitlab::VisibilityLevel::PUBLIC,
- Gitlab::VisibilityLevel::INTERNAL],
- wiki_access_level: [ProjectFeature::PRIVATE],
- user_role: member_roles)
-
- with_them do
- it { is_expected.to be_allowed(:read_wiki) }
- end
- end
-
- context 'the wiki is private, and the user is not member' do
- where(case_names: title,
- project_visibility: [Gitlab::VisibilityLevel::PUBLIC,
- Gitlab::VisibilityLevel::INTERNAL],
- wiki_access_level: [ProjectFeature::PRIVATE],
- user_role: stranger_roles)
-
- with_them do
- it { is_expected.to be_disallowed(:read_wiki) }
- end
- end
-
- context 'the wiki is enabled, and the user is a member' do
- where(case_names: title,
- project_visibility: [Gitlab::VisibilityLevel::PRIVATE],
- wiki_access_level: [ProjectFeature::ENABLED],
- user_role: member_roles)
-
- with_them do
- it { is_expected.to be_allowed(:read_wiki) }
- end
- end
-
- context 'the wiki is enabled, and the user is not a member' do
- where(case_names: title,
- project_visibility: [Gitlab::VisibilityLevel::PRIVATE],
- wiki_access_level: [ProjectFeature::ENABLED],
- user_role: stranger_roles)
-
- with_them do
- it { is_expected.to be_disallowed(:read_wiki) }
- end
- end
- end
-
- describe 'Situations where :read_wiki prohibits anonymous access' do
- context 'the user is not anonymous' do
- where(case_names: title,
- project_visibility: [Gitlab::VisibilityLevel::INTERNAL],
- wiki_access_level: [ProjectFeature::ENABLED, ProjectFeature::PUBLIC],
- user_role: user_roles.reject { |u| u == :anonymous })
-
- with_them do
- it { is_expected.to be_allowed(:read_wiki) }
- end
- end
-
- context 'the user is not anonymous' do
- where(case_names: title,
- project_visibility: [Gitlab::VisibilityLevel::INTERNAL],
- wiki_access_level: [ProjectFeature::ENABLED, ProjectFeature::PUBLIC],
- user_role: %i[anonymous])
-
- with_them do
- it { is_expected.to be_disallowed(:read_wiki) }
- end
- end
- end
+ def set_access_level(access_level)
+ project.project_feature.update_attribute(:wiki_access_level, access_level)
end
end
diff --git a/spec/presenters/ci/pipeline_presenter_spec.rb b/spec/presenters/ci/pipeline_presenter_spec.rb
index 28eb6804703..e8b66682b97 100644
--- a/spec/presenters/ci/pipeline_presenter_spec.rb
+++ b/spec/presenters/ci/pipeline_presenter_spec.rb
@@ -236,7 +236,7 @@ describe Ci::PipelinePresenter do
context 'for a branch pipeline with two open MRs' do
let!(:one) { create(:merge_request, source_project: project, source_branch: pipeline.ref) }
- let!(:two) { create(:merge_request, source_project: project, source_branch: pipeline.ref, target_branch: 'wip') }
+ let!(:two) { create(:merge_request, source_project: project, source_branch: pipeline.ref, target_branch: 'fix') }
it { is_expected.to contain_exactly(one, two) }
end
diff --git a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
index 7e213d3adb0..feca89558e3 100644
--- a/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
+++ b/spec/requests/api/graphql/mutations/jira_import/start_spec.rb
@@ -99,6 +99,12 @@ describe 'Starting a Jira Import' do
it_behaves_like 'a mutation that returns errors in the response', errors: ['Jira integration not configured.']
end
+ context 'when issues feature is disabled' do
+ let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
+
+ it_behaves_like 'a mutation that returns errors in the response', errors: ['Cannot import because issues are not available in this project.']
+ end
+
context 'when project has Jira service' do
let!(:service) { create(:jira_service, project: project) }
diff --git a/spec/requests/api/graphql/project/merge_request/diff_notes_spec.rb b/spec/requests/api/graphql/project/merge_request/diff_notes_spec.rb
index e260e4463f4..c616310a72c 100644
--- a/spec/requests/api/graphql/project/merge_request/diff_notes_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request/diff_notes_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
describe 'getting notes for a merge request' do
include GraphqlHelpers
- let(:noteable) { create(:merge_request) }
+ let_it_be(:noteable) { create(:merge_request) }
def noteable_query(noteable_fields)
<<~QRY
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index e1fe6470881..a1b3111ff71 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -93,4 +93,41 @@ describe 'getting merge request information nested in a project' do
expect(merge_request_graphql_data['pipelines']['edges'].size).to eq(1)
end
end
+
+ context 'when limiting the number of results' do
+ let(:merge_requests_graphql_data) { graphql_data['project']['mergeRequests']['edges'] }
+
+ let!(:merge_requests) do
+ [
+ create(:merge_request, source_project: project, source_branch: 'branch-1'),
+ create(:merge_request, source_project: project, source_branch: 'branch-2'),
+ create(:merge_request, source_project: project, source_branch: 'branch-3')
+ ]
+ end
+
+ let(:fields) do
+ <<~QUERY
+ edges {
+ node {
+ iid,
+ title
+ }
+ }
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ "mergeRequests(first: 2) { #{fields} }"
+ )
+ end
+
+ it 'returns the correct number of results' do
+ post_graphql(query, current_user: current_user)
+
+ expect(merge_requests_graphql_data.size).to eq 2
+ end
+ end
end
diff --git a/spec/requests/api/group_clusters_spec.rb b/spec/requests/api/group_clusters_spec.rb
index d3bd84f1604..fade54f6b11 100644
--- a/spec/requests/api/group_clusters_spec.rb
+++ b/spec/requests/api/group_clusters_spec.rb
@@ -157,6 +157,7 @@ describe API::GroupClusters do
let(:api_url) { 'https://kubernetes.example.com' }
let(:authorization_type) { 'rbac' }
+ let(:management_project_id) { create(:project, group: group).id }
let(:platform_kubernetes_attributes) do
{
@@ -171,7 +172,8 @@ describe API::GroupClusters do
name: 'test-cluster',
domain: 'domain.example.com',
managed: false,
- platform_kubernetes_attributes: platform_kubernetes_attributes
+ platform_kubernetes_attributes: platform_kubernetes_attributes,
+ management_project_id: management_project_id
}
end
@@ -203,6 +205,7 @@ describe API::GroupClusters do
expect(cluster_result.name).to eq('test-cluster')
expect(cluster_result.domain).to eq('domain.example.com')
expect(cluster_result.managed).to be_falsy
+ expect(cluster_result.management_project_id).to eq management_project_id
expect(platform_kubernetes.rbac?).to be_truthy
expect(platform_kubernetes.api_url).to eq(api_url)
expect(platform_kubernetes.token).to eq('sample-token')
@@ -234,6 +237,18 @@ describe API::GroupClusters do
end
end
+ context 'current user does not have access to management_project_id' do
+ let(:management_project_id) { create(:project).id }
+
+ it 'responds with 400' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns validation errors' do
+ expect(json_response['message']['management_project_id'].first).to match('don\'t have permission')
+ end
+ end
+
context 'with invalid params' do
let(:api_url) { 'invalid_api_url' }
diff --git a/spec/requests/api/metrics/dashboard/annotations_spec.rb b/spec/requests/api/metrics/dashboard/annotations_spec.rb
new file mode 100644
index 00000000000..0b51c46e474
--- /dev/null
+++ b/spec/requests/api/metrics/dashboard/annotations_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe API::Metrics::Dashboard::Annotations do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private, :repository, namespace: user.namespace) }
+ let_it_be(:environment) { create(:environment, project: project) }
+ let(:dashboard) { 'config/prometheus/common_metrics.yml' }
+ let(:starting_at) { Time.now.iso8601 }
+ let(:ending_at) { 1.hour.from_now.iso8601 }
+ let(:params) { attributes_for(:metrics_dashboard_annotation, environment: environment, starting_at: starting_at, ending_at: ending_at, dashboard_path: dashboard)}
+
+ describe 'POST /environments/:environment_id/metrics_dashboard/annotations' do
+ before :all do
+ project.add_developer(user)
+ end
+
+ context 'feature flag metrics_dashboard_annotations' do
+ context 'is on' do
+ before do
+ stub_feature_flags(metrics_dashboard_annotations: { enabled: true, thing: project })
+ end
+ context 'with correct permissions' do
+ context 'with valid parameters' do
+ it 'creates a new annotation', :aggregate_failures do
+ post api("/environments/#{environment.id}/metrics_dashboard/annotations", user), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(json_response['environment_id']).to eq(environment.id)
+ expect(json_response['starting_at'].to_time).to eq(starting_at.to_time)
+ expect(json_response['ending_at'].to_time).to eq(ending_at.to_time)
+ expect(json_response['description']).to eq(params[:description])
+ expect(json_response['dashboard_path']).to eq(dashboard)
+ end
+ end
+
+ context 'with invalid parameters' do
+ it 'returns error message' do
+ post api("/environments/#{environment.id}/metrics_dashboard/annotations", user),
+ params: { dashboard_path: nil, starting_at: nil, description: nil }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include({ "starting_at" => ["can't be blank"], "description" => ["can't be blank"], "dashboard_path" => ["can't be blank"] })
+ end
+ end
+
+ context 'with undeclared params' do
+ before do
+ params[:undeclared_param] = 'xyz'
+ end
+ it 'filters out undeclared params' do
+ expect(::Metrics::Dashboard::Annotations::CreateService).to receive(:new).with(user, hash_excluding(:undeclared_param))
+
+ post api("/environments/#{environment.id}/metrics_dashboard/annotations", user), params: params
+ end
+ end
+ end
+
+ context 'without correct permissions' do
+ let_it_be(:guest) { create(:user) }
+
+ before do
+ project.add_guest(guest)
+ end
+
+ it 'returns error message' do
+ post api("/environments/#{environment.id}/metrics_dashboard/annotations", guest), params: params
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+ context 'is off' do
+ before do
+ stub_feature_flags(metrics_dashboard_annotations: { enabled: false, thing: project })
+ end
+
+ it 'returns error message' do
+ post api("/environments/#{environment.id}/metrics_dashboard/annotations", user), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/project_clusters_spec.rb b/spec/requests/api/project_clusters_spec.rb
index 648577dce8d..ed899e830e1 100644
--- a/spec/requests/api/project_clusters_spec.rb
+++ b/spec/requests/api/project_clusters_spec.rb
@@ -150,6 +150,12 @@ describe API::ProjectClusters do
let(:api_url) { 'https://kubernetes.example.com' }
let(:namespace) { project.path }
let(:authorization_type) { 'rbac' }
+ let(:management_project) { create(:project, namespace: project.namespace) }
+ let(:management_project_id) { management_project.id }
+
+ before do
+ management_project.add_maintainer(current_user)
+ end
let(:platform_kubernetes_attributes) do
{
@@ -165,7 +171,8 @@ describe API::ProjectClusters do
name: 'test-cluster',
domain: 'domain.example.com',
managed: false,
- platform_kubernetes_attributes: platform_kubernetes_attributes
+ platform_kubernetes_attributes: platform_kubernetes_attributes,
+ management_project_id: management_project_id
}
end
@@ -194,6 +201,7 @@ describe API::ProjectClusters do
expect(cluster_result.name).to eq('test-cluster')
expect(cluster_result.domain).to eq('domain.example.com')
expect(cluster_result.managed).to be_falsy
+ expect(cluster_result.management_project_id).to eq management_project_id
expect(platform_kubernetes.rbac?).to be_truthy
expect(platform_kubernetes.api_url).to eq(api_url)
expect(platform_kubernetes.namespace).to eq(namespace)
@@ -227,6 +235,18 @@ describe API::ProjectClusters do
end
end
+ context 'current user does not have access to management_project_id' do
+ let(:management_project_id) { create(:project).id }
+
+ it 'responds with 400' do
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it 'returns validation errors' do
+ expect(json_response['message']['management_project_id'].first).to match('don\'t have permission')
+ end
+ end
+
context 'with invalid params' do
let(:namespace) { 'invalid_namespace' }
diff --git a/spec/rubocop/cop/performance/ar_count_each_spec.rb b/spec/rubocop/cop/performance/ar_count_each_spec.rb
new file mode 100644
index 00000000000..f934a1fde48
--- /dev/null
+++ b/spec/rubocop/cop/performance/ar_count_each_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../support/helpers/expect_offense'
+require_relative '../../../../rubocop/cop/performance/ar_count_each.rb'
+
+describe RuboCop::Cop::Performance::ARCountEach do
+ include CopHelper
+ include ExpectOffense
+
+ subject(:cop) { described_class.new }
+
+ context 'when it is not haml file' do
+ it 'does not flag it as an offense' do
+ expect(subject).to receive(:in_haml_file?).with(anything).at_least(:once).and_return(false)
+
+ expect_no_offenses <<~SOURCE
+ show(@users.count)
+ @users.each { |user| display(user) }
+ SOURCE
+ end
+ end
+
+ context 'when it is haml file' do
+ before do
+ expect(subject).to receive(:in_haml_file?).with(anything).at_least(:once).and_return(true)
+ end
+
+ context 'when the same object uses count and each' do
+ it 'flags it as an offense' do
+ expect_offense <<~SOURCE
+ show(@users.count)
+ ^^^^^^^^^^^^ If @users is AR relation, avoid `@users.count ...; @users.each... `, this will trigger two queries. Use `@users.load.size ...; @users.each... ` instead. If @users is an array, try to use @users.size.
+ @users.each { |user| display(user) }
+ SOURCE
+
+ expect(cop.offenses.map(&:cop_name)).to contain_exactly('Performance/ARCountEach')
+ end
+ end
+
+ context 'when different object uses count and each' do
+ it 'does not flag it as an offense' do
+ expect_no_offenses <<~SOURCE
+ show(@emails.count)
+ @users.each { |user| display(user) }
+ SOURCE
+ end
+ end
+
+ context 'when just using count without each' do
+ it 'does not flag it as an offense' do
+ expect_no_offenses '@users.count'
+ end
+ end
+
+ context 'when just using each without count' do
+ it 'does not flag it as an offense' do
+ expect_no_offenses '@users.each { |user| display(user) }'
+ end
+ end
+ end
+end
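To make the new cop's suggestion concrete, a small illustration of the pattern it flags and the fix its message proposes; render_users and the name attribute are placeholders, not GitLab code:

# Flagged: with an ActiveRecord relation, `count` issues a COUNT query and
# `each` issues a second SELECT for the same records.
def render_users_slow(users)
  puts "Total: #{users.count}"
  users.each { |user| puts user.name }
end

# Suggested: load the relation once; `size` then uses the in-memory records
# and `each` reuses them, so only one query runs.
def render_users(users)
  users = users.load
  puts "Total: #{users.size}"
  users.each { |user| puts user.name }
end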
diff --git a/spec/serializers/build_details_entity_spec.rb b/spec/serializers/build_details_entity_spec.rb
index 15f605b183d..92917f6ea25 100644
--- a/spec/serializers/build_details_entity_spec.rb
+++ b/spec/serializers/build_details_entity_spec.rb
@@ -15,7 +15,7 @@ describe BuildDetailsEntity do
let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:build) { create(:ci_build, :failed, pipeline: pipeline) }
- let(:request) { double('request') }
+ let(:request) { double('request', project: project) }
let(:entity) do
described_class.new(build, request: request,
diff --git a/spec/services/application_settings/update_service_spec.rb b/spec/services/application_settings/update_service_spec.rb
index 6e1fdb7aad0..069572e4dff 100644
--- a/spec/services/application_settings/update_service_spec.rb
+++ b/spec/services/application_settings/update_service_spec.rb
@@ -334,4 +334,20 @@ describe ApplicationSettings::UpdateService do
expect(application_settings.protected_paths).to eq(['/users/password', '/users/sign_in'])
end
end
+
+ context 'when issues_create_limit is passed' do
+ let(:params) do
+ {
+ issues_create_limit: 600
+ }
+ end
+
+ it 'updates issues_create_limit value' do
+ subject.execute
+
+ application_settings.reload
+
+ expect(application_settings.issues_create_limit).to eq(600)
+ end
+ end
end
diff --git a/spec/services/auth/container_registry_authentication_service_spec.rb b/spec/services/auth/container_registry_authentication_service_spec.rb
index 84f4a7a4e7a..8273269c2fb 100644
--- a/spec/services/auth/container_registry_authentication_service_spec.rb
+++ b/spec/services/auth/container_registry_authentication_service_spec.rb
@@ -766,8 +766,8 @@ describe Auth::ContainerRegistryAuthenticationService do
{ scopes: ["repository:#{project.full_path}:pull"] }
end
- context 'when deploy token has read_registry as a scope' do
- let(:current_user) { create(:deploy_token, projects: [project]) }
+ context 'when deploy token has read and write registry as scopes' do
+ let(:current_user) { create(:deploy_token, write_registry: true, projects: [project]) }
shared_examples 'able to login' do
context 'registry provides read_container_image authentication_abilities' do
@@ -790,7 +790,7 @@ describe Auth::ContainerRegistryAuthenticationService do
{ scopes: ["repository:#{project.full_path}:push"] }
end
- it_behaves_like 'an inaccessible'
+ it_behaves_like 'a pushable'
end
it_behaves_like 'able to login'
@@ -808,7 +808,7 @@ describe Auth::ContainerRegistryAuthenticationService do
{ scopes: ["repository:#{project.full_path}:push"] }
end
- it_behaves_like 'an inaccessible'
+ it_behaves_like 'a pushable'
end
it_behaves_like 'able to login'
@@ -826,7 +826,7 @@ describe Auth::ContainerRegistryAuthenticationService do
{ scopes: ["repository:#{project.full_path}:push"] }
end
- it_behaves_like 'an inaccessible'
+ it_behaves_like 'a pushable'
end
it_behaves_like 'able to login'
diff --git a/spec/services/clusters/create_service_spec.rb b/spec/services/clusters/create_service_spec.rb
index ecf0a9c9dce..3dd25be2a3d 100644
--- a/spec/services/clusters/create_service_spec.rb
+++ b/spec/services/clusters/create_service_spec.rb
@@ -59,4 +59,92 @@ describe Clusters::CreateService do
end
end
end
+
+ context 'when params includes :management_project_id' do
+ subject(:cluster) { described_class.new(user, params).execute(access_token: access_token) }
+
+ let(:params) do
+ {
+ name: 'test-cluster',
+ provider_type: :gcp,
+ provider_gcp_attributes: {
+ gcp_project_id: 'gcp-project',
+ zone: 'us-central1-a',
+ num_nodes: 1,
+ machine_type: 'machine_type-a',
+ legacy_abac: 'true'
+ },
+ clusterable: clusterable,
+ management_project_id: management_project_id
+ }
+ end
+
+ let(:clusterable) { project }
+ let(:management_project_id) { management_project.id }
+ let(:management_project_namespace) { project.namespace }
+ let(:management_project) { create(:project, namespace: management_project_namespace) }
+
+ shared_examples 'invalid project or cluster permissions' do
+ it 'does not persist the cluster and adds errors' do
+ expect(cluster).not_to be_persisted
+
+ expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
+ end
+ end
+
+ shared_examples 'setting a management project' do
+ context 'when user is authorized to administer management_project' do
+ before do
+ management_project.add_maintainer(user)
+ end
+
+ it 'persists the cluster' do
+ expect(cluster).to be_persisted
+
+ expect(cluster.management_project).to eq(management_project)
+ end
+ end
+
+ context 'when user is not authorized to administer management_project' do
+ include_examples 'invalid project or cluster permissions'
+ end
+ end
+
+ shared_examples 'setting a management project outside of scope' do
+ context 'when management_project is outside of the namespace scope' do
+ let(:management_project_namespace) { create(:group) }
+
+ it 'does not persist the cluster' do
+ expect(cluster).not_to be_persisted
+
+ expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
+ end
+ end
+ end
+
+ context 'when management_project does not exist' do
+ let(:management_project_id) { 0 }
+
+ include_examples 'invalid project or cluster permissions'
+ end
+
+ context 'project cluster' do
+ include_examples 'setting a management project'
+ include_examples 'setting a management project outside of scope'
+ end
+
+ context 'group cluster' do
+ let(:management_project_namespace) { create(:group) }
+ let(:clusterable) { management_project_namespace }
+
+ include_examples 'setting a management project'
+ include_examples 'setting a management project outside of scope'
+ end
+
+ context 'instance cluster' do
+ let(:clusterable) { Clusters::Instance.new }
+
+ include_examples 'setting a management project'
+ end
+ end
end
diff --git a/spec/services/clusters/management/validate_management_project_permissions_service_spec.rb b/spec/services/clusters/management/validate_management_project_permissions_service_spec.rb
new file mode 100644
index 00000000000..1bcebe2e2ac
--- /dev/null
+++ b/spec/services/clusters/management/validate_management_project_permissions_service_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe Clusters::Management::ValidateManagementProjectPermissionsService do
+ describe '#execute' do
+ subject { described_class.new(user).execute(cluster, management_project_id) }
+
+ let(:cluster) { build(:cluster, :project, projects: [create(:project)]) }
+ let(:user) { create(:user) }
+
+ context 'when management_project_id is nil' do
+ let(:management_project_id) { nil }
+
+ it { is_expected.to be true }
+ end
+
+ context 'when management_project_id is not nil' do
+ let(:management_project_id) { management_project.id }
+ let(:management_project_namespace) { create(:group) }
+ let(:management_project) { create(:project, namespace: management_project_namespace) }
+
+ context 'when management_project does not exist' do
+ let(:management_project_id) { 0 }
+
+ it 'adds errors to the cluster and returns false' do
+ is_expected.to eq false
+
+ expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
+ end
+ end
+
+ shared_examples 'management project is in scope' do
+ context 'when user is authorized to administer management_project' do
+ before do
+ management_project.add_maintainer(user)
+ end
+
+ it 'adds no error and returns true' do
+ is_expected.to eq true
+
+ expect(cluster.errors).to be_empty
+ end
+ end
+
+ context 'when user is not authorized to administer management_project' do
+ it 'adds an error and returns false' do
+ is_expected.to eq false
+
+ expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
+ end
+ end
+ end
+
+ shared_examples 'management project is out of scope' do
+ context 'when management_project is outside of the namespace scope' do
+ let(:management_project_namespace) { create(:group) }
+
+ it 'adds an error and returns false' do
+ is_expected.to eq false
+
+ expect(cluster.errors[:management_project_id]).to include('Project does not exist or you don\'t have permission to perform this action')
+ end
+ end
+ end
+
+ context 'project cluster' do
+ let(:cluster) { build(:cluster, :project, projects: [create(:project, namespace: management_project_namespace)]) }
+
+ include_examples 'management project is in scope'
+ include_examples 'management project is out of scope'
+ end
+
+ context 'group cluster' do
+ let(:cluster) { build(:cluster, :group, groups: [management_project_namespace]) }
+
+ include_examples 'management project is in scope'
+ include_examples 'management project is out of scope'
+ end
+
+ context 'instance cluster' do
+ let(:cluster) { build(:cluster, :instance) }
+
+ include_examples 'management project is in scope'
+ end
+ end
+ end
+end
diff --git a/spec/services/environments/auto_stop_service_spec.rb b/spec/services/environments/auto_stop_service_spec.rb
index 3620bf8fe87..b34d15889d3 100644
--- a/spec/services/environments/auto_stop_service_spec.rb
+++ b/spec/services/environments/auto_stop_service_spec.rb
@@ -40,18 +40,6 @@ describe Environments::AutoStopService, :clean_gitlab_redis_shared_state do
expect(Ci::Build.where(name: 'stop_review_app').map(&:status).uniq).to eq(['pending'])
end
- context 'when auto_stop_environments feature flag is disabled' do
- before do
- stub_feature_flags(auto_stop_environments: false)
- end
-
- it 'does not execute Ci::StopEnvironmentsService' do
- expect(Ci::StopEnvironmentsService).not_to receive(:execute_in_batch)
-
- subject
- end
- end
-
context 'when the other sidekiq worker has already been running' do
before do
stub_exclusive_lease_taken(described_class::EXCLUSIVE_LOCK_KEY)
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index a316c8a4219..bd50d6b1001 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -368,10 +368,12 @@ describe Issues::CreateService do
end
context 'checking spam' do
+ let(:title) { 'Legit issue' }
+ let(:description) { 'please fix' }
let(:opts) do
{
- title: 'Awesome issue',
- description: 'please fix',
+ title: title,
+ description: description,
request: double(:request, env: {})
}
end
@@ -382,7 +384,7 @@ describe Issues::CreateService do
context 'when recaptcha was verified' do
let(:log_user) { user }
- let(:spam_logs) { create_list(:spam_log, 2, user: log_user, title: 'Awesome issue') }
+ let(:spam_logs) { create_list(:spam_log, 2, user: log_user, title: title) }
let(:target_spam_log) { spam_logs.last }
before do
@@ -396,7 +398,7 @@ describe Issues::CreateService do
expect(issue).not_to be_spam
end
- it 'issue is valid ' do
+ it 'creates a valid issue' do
expect(issue).to be_valid
end
@@ -405,14 +407,14 @@ describe Issues::CreateService do
end
it 'marks related spam_log as recaptcha_verified' do
- expect { issue }.to change {SpamLog.last.recaptcha_verified}.from(false).to(true)
+ expect { issue }.to change { target_spam_log.reload.recaptcha_verified }.from(false).to(true)
end
context 'when spam log does not belong to a user' do
let(:log_user) { create(:user) }
it 'does not mark spam_log as recaptcha_verified' do
- expect { issue }.not_to change {SpamLog.last.recaptcha_verified}
+ expect { issue }.not_to change { target_spam_log.reload.recaptcha_verified }
end
end
end
@@ -431,8 +433,8 @@ describe Issues::CreateService do
end
end
- context 'when issuables_recaptcha_enabled feature flag is true' do
- it 'marks an issue as spam' do
+ context 'when allow_possible_spam feature flag is false' do
+ it 'marks the issue as spam' do
expect(issue).to be_spam
end
@@ -442,34 +444,26 @@ describe Issues::CreateService do
it 'creates a new spam_log' do
expect { issue }
- .to have_spam_log(title: issue.title, description: issue.description, user_id: user.id, noteable_type: 'Issue')
- end
-
- it 'assigns a spam_log to the issue' do
- expect(issue.spam_log).to eq(SpamLog.last)
+ .to have_spam_log(title: title, description: description, user_id: user.id, noteable_type: 'Issue')
end
end
- context 'when issuable_recaptcha_enabled feature flag is false' do
+ context 'when allow_possible_spam feature flag is true' do
before do
stub_feature_flags(allow_possible_spam: true)
end
- it 'does not mark an issue as spam' do
+ it 'does not mark the issue as spam' do
expect(issue).not_to be_spam
end
- it 'accepts the ​issue as valid' do
+ it 'creates a valid issue' do
expect(issue).to be_valid
end
it 'creates a new spam_log' do
expect { issue }
- .to have_spam_log(title: issue.title, description: issue.description, user_id: user.id, noteable_type: 'Issue')
- end
-
- it 'assigns a spam_log to an issue' do
- expect(issue.spam_log).to eq(SpamLog.last)
+ .to have_spam_log(title: title, description: description, user_id: user.id, noteable_type: 'Issue')
end
end
end
@@ -485,8 +479,8 @@ describe Issues::CreateService do
expect(issue).not_to be_spam
end
- it 'an issue is valid ' do
- expect(issue.valid?).to be_truthy
+ it 'creates a valid issue' do
+ expect(issue).to be_valid
end
it 'does not assign a spam_log to an issue' do
diff --git a/spec/services/jira_import/start_import_service_spec.rb b/spec/services/jira_import/start_import_service_spec.rb
index 8d9ba5ac692..ae0c4f63fee 100644
--- a/spec/services/jira_import/start_import_service_spec.rb
+++ b/spec/services/jira_import/start_import_service_spec.rb
@@ -38,6 +38,12 @@ describe JiraImport::StartImportService do
it_behaves_like 'responds with error', 'Jira integration not configured.'
end
+ context 'when issues feature is disabled' do
+ let_it_be(:project, reload: true) { create(:project, :issues_disabled) }
+
+ it_behaves_like 'responds with error', 'Cannot import because issues are not available in this project.'
+ end
+
context 'when Jira service exists' do
let!(:jira_service) { create(:jira_service, project: project, active: true) }
@@ -75,6 +81,28 @@ describe JiraImport::StartImportService do
expect(jira_import.jira_project_key).to eq(fake_key)
expect(jira_import.user).to eq(user)
end
+
+ it 'creates a jira import label' do
+ expect { subject }.to change { Label.count }.by(1)
+ end
+
+ it 'creates a jira label title with the correct number' do
+ jira_import = subject.payload[:import_data]
+
+ label_title = "jira-import::#{jira_import.jira_project_key}-1"
+ expect(jira_import.label.title).to eq(label_title)
+ end
+
+ context 'when there are multiple jira imports for the same jira project' do
+ let!(:jira_imports) { create_list(:jira_import_state, 3, :finished, project: project, jira_project_key: fake_key) }
+
+ it 'creates a jira label title with the correct number' do
+ jira_import = subject.payload[:import_data]
+
+ label_title = "jira-import::#{jira_import.jira_project_key}-4"
+ expect(jira_import.label.title).to eq(label_title)
+ end
+ end
end
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 86f37e9204c..163ca0b9bc3 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -2604,6 +2604,7 @@ describe NotificationService, :mailer do
pages_domain_disabled
pages_domain_verification_succeeded
pages_domain_verification_failed
+ pages_domain_auto_ssl_failed
].each do |sym|
describe "##{sym}" do
subject(:notify!) { notification.send(sym, domain) }
diff --git a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
index 163276db7e6..63fd0978c97 100644
--- a/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
+++ b/spec/services/pages_domains/obtain_lets_encrypt_certificate_service_spec.rb
@@ -180,5 +180,13 @@ describe PagesDomains::ObtainLetsEncryptCertificateService do
expect(PagesDomainAcmeOrder.find_by_id(existing_order.id)).to be_nil
end
+
+ it 'sends notification' do
+ expect_next_instance_of(NotificationService) do |notification_service|
+ expect(notification_service).to receive(:pages_domain_auto_ssl_failed).with(pages_domain)
+ end
+
+ service.execute
+ end
end
end
diff --git a/spec/services/pages_domains/retry_acme_order_service_spec.rb b/spec/services/pages_domains/retry_acme_order_service_spec.rb
new file mode 100644
index 00000000000..0185f10864c
--- /dev/null
+++ b/spec/services/pages_domains/retry_acme_order_service_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+describe PagesDomains::RetryAcmeOrderService do
+ let(:domain) { create(:pages_domain, auto_ssl_enabled: true, auto_ssl_failed: true) }
+
+ let(:service) { described_class.new(domain) }
+
+ it 'clears auto_ssl_failed' do
+ expect do
+ service.execute
+ end.to change { domain.auto_ssl_failed }.from(true).to(false)
+ end
+
+ it 'schedules renewal worker' do
+ expect(PagesDomainSslRenewalWorker).to receive(:perform_async).with(domain.id).and_return(nil).once
+
+ service.execute
+ end
+
+ it "doesn't schedule renewal worker if Let's Encrypt integration is not enabled" do
+ domain.update!(auto_ssl_enabled: false)
+
+ expect(PagesDomainSslRenewalWorker).not_to receive(:new)
+
+ service.execute
+ end
+
+ it "doesn't schedule renewal worker if auto ssl has not failed yet" do
+ domain.update!(auto_ssl_failed: false)
+
+ expect(PagesDomainSslRenewalWorker).not_to receive(:new)
+
+ service.execute
+ end
+end
diff --git a/spec/services/projects/fork_service_spec.rb b/spec/services/projects/fork_service_spec.rb
index 443e3dfddf1..c8354f6ba4e 100644
--- a/spec/services/projects/fork_service_spec.rb
+++ b/spec/services/projects/fork_service_spec.rb
@@ -311,6 +311,8 @@ describe Projects::ForkService do
fork_before_move = fork_project(project)
# Stub everything required to move a project to a Gitaly shard that does not exist
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
stub_storage_settings('test_second_storage' => { 'path' => TestEnv::SECOND_STORAGE_PATH })
allow_any_instance_of(Gitlab::Git::Repository).to receive(:create_repository)
.and_return(true)
diff --git a/spec/services/projects/update_repository_storage_service_spec.rb b/spec/services/projects/update_repository_storage_service_spec.rb
index 23ce6f9165d..05555fa76f7 100644
--- a/spec/services/projects/update_repository_storage_service_spec.rb
+++ b/spec/services/projects/update_repository_storage_service_spec.rb
@@ -20,6 +20,8 @@ describe Projects::UpdateRepositoryStorageService do
let(:project_repository_double) { double(:repository) }
before do
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
allow(Gitlab::Git::Repository).to receive(:new).and_call_original
allow(Gitlab::Git::Repository).to receive(:new)
.with('test_second_storage', project.repository.raw.relative_path, project.repository.gl_repository, project.repository.full_path)
@@ -49,17 +51,20 @@ describe Projects::UpdateRepositoryStorageService do
end
end
- context 'when the project is already on the target storage' do
+ context 'when the filesystems are the same' do
it 'bails out and does nothing' do
result = subject.execute(project.repository_storage)
expect(result[:status]).to eq(:error)
- expect(result[:message]).to match(/repository and source have the same storage/)
+ expect(result[:message]).to match(/SameFilesystemError/)
end
end
context 'when the move fails' do
it 'unmarks the repository as read-only without updating the repository storage' do
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
+
expect(project_repository_double).to receive(:create_repository)
.and_return(true)
expect(project_repository_double).to receive(:replicate)
@@ -77,6 +82,9 @@ describe Projects::UpdateRepositoryStorageService do
context 'when the checksum does not match' do
it 'unmarks the repository as read-only without updating the repository storage' do
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
+
expect(project_repository_double).to receive(:create_repository)
.and_return(true)
expect(project_repository_double).to receive(:replicate)
@@ -97,6 +105,9 @@ describe Projects::UpdateRepositoryStorageService do
let!(:pool) { create(:pool_repository, :ready, source_project: project) }
it 'leaves the pool' do
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
+
expect(project_repository_double).to receive(:create_repository)
.and_return(true)
expect(project_repository_double).to receive(:replicate)
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 0e6078cc444..19d12a0f5cb 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -91,6 +91,10 @@ RSpec.configure do |config|
match = location.match(%r{/spec/([^/]+)/})
metadata[:type] = match[1].singularize.to_sym if match
end
+
+ # Admin controller specs get admin mode enabled automatically, since they
+ # are protected by the 'EnforcesAdminAuthentication' concern
+ metadata[:enable_admin_mode] = true if location =~ %r{(ee)?/spec/controllers/admin/}
end
config.include LicenseHelpers
@@ -226,7 +230,6 @@ RSpec.configure do |config|
#
# context 'some test in mocked dir', :do_not_mock_admin_mode do ... end
admin_mode_mock_dirs = %w(
- ./ee/spec/controllers
./ee/spec/elastic_integration
./ee/spec/features
./ee/spec/finders
@@ -238,7 +241,6 @@ RSpec.configure do |config|
./ee/spec/services
./ee/spec/support/protected_tags
./ee/spec/support/shared_examples
- ./spec/controllers
./spec/features
./spec/finders
./spec/frontend
@@ -270,7 +272,7 @@ RSpec.configure do |config|
# context 'some test that requires admin mode', :enable_admin_mode do ... end
#
# See also spec/support/helpers/admin_mode_helpers.rb
- if example.metadata[:enable_admin_mode]
+ if example.metadata[:enable_admin_mode] && !example.metadata[:do_not_mock_admin_mode]
allow_any_instance_of(Gitlab::Auth::CurrentUserMode).to receive(:admin_mode?) do |current_user_mode|
current_user_mode.send(:user)&.admin?
end
diff --git a/spec/support/helpers/features/web_ide_spec_helpers.rb b/spec/support/helpers/features/web_ide_spec_helpers.rb
new file mode 100644
index 00000000000..37c8345a4e5
--- /dev/null
+++ b/spec/support/helpers/features/web_ide_spec_helpers.rb
@@ -0,0 +1,148 @@
+# frozen_string_literal: true
+
+# These helpers let you interact with the Web IDE.
+#
+# Usage:
+# describe "..." do
+# include WebIdeSpecHelpers
+# ...
+#
+# ide_visit(project)
+# ide_create_new_file('path/to/file.txt', content: 'Lorem ipsum')
+# ide_commit
+#
+module WebIdeSpecHelpers
+ include ActionView::Helpers::JavaScriptHelper
+
+ def ide_visit(project)
+ visit project_path(project)
+
+ wait_for_requests
+
+ click_link('Web IDE')
+
+ wait_for_requests
+ end
+
+ def ide_tree_body
+ page.find('.ide-tree-body')
+ end
+
+ def ide_tree_actions
+ page.find('.ide-tree-actions')
+ end
+
+ def ide_file_row_open?(row)
+ row.matches_css?('.is-open')
+ end
+
+ # Creates a file in the IDE by expanding directories
+ # then using the dropdown next to the parent directory
+ #
+ # - Throws an error if the parent directory is not found
+ def ide_create_new_file(path, content: '')
+ parent_path = path.split('/')[0...-1].join('/')
+
+ container = ide_traverse_to_file(parent_path)
+
+ if container
+ click_file_action(container, 'New file')
+ else
+ ide_tree_actions.click_button('New file')
+ end
+
+ within '#ide-new-entry' do
+ find('input').fill_in(with: path)
+ click_button('Create file')
+ end
+
+ ide_set_editor_value(content)
+ end
+
+ # Deletes a file by traversing to `path`
+ # then clicking the 'Delete' action.
+ #
+ # - Throws an error if the file is not found
+ def ide_delete_file(path)
+ container = ide_traverse_to_file(path)
+
+ click_file_action(container, 'Delete')
+ end
+
+ # Opens parent directories until the file at `path`
+ # is exposed.
+ #
+ # - Returns a reference to the file row at `path`
+ # - Throws an error if the file is not found
+ def ide_traverse_to_file(path)
+ paths = path.split('/')
+ container = nil
+
+ paths.each_with_index do |path, index|
+ ide_open_file_row(container) if container
+ container = find_file_child(container, path, level: index)
+ end
+
+ container
+ end
+
+ def ide_open_file_row(row)
+ return if ide_file_row_open?(row)
+
+ row.click
+ end
+
+ def ide_set_editor_value(value)
+ editor = find('.monaco-editor')
+ uri = editor['data-uri']
+
+ execute_script("monaco.editor.getModel('#{uri}').setValue('#{escape_javascript(value)}')")
+ end
+
+ def ide_editor_value
+ editor = find('.monaco-editor')
+ uri = editor['data-uri']
+
+ evaluate_script("monaco.editor.getModel('#{uri}').getValue()")
+ end
+
+ def ide_commit
+ ide_switch_mode('commit')
+
+ commit_to_current_branch
+ end
+
+ def ide_switch_mode(mode)
+ find(".js-ide-#{mode}-mode").click
+ end
+
+ private
+
+ def file_row_container(row)
+ row ? row.find(:xpath, '..') : ide_tree_body
+ end
+
+ def find_file_child(row, name, level: nil)
+ container = file_row_container(row)
+ container.find(".file-row[data-level=\"#{level}\"]", text: name)
+ end
+
+ def click_file_action(row, text)
+ row.hover
+ dropdown = row.find('.ide-new-btn')
+ dropdown.find('button').click
+ dropdown.find('button', text: text).click
+ end
+
+ def commit_to_current_branch(option: 'Commit to master branch', message: '')
+ within '.multi-file-commit-form' do
+ fill_in('commit-message', with: message) if message
+
+ choose(option)
+
+ click_button('Commit')
+
+ wait_for_requests
+ end
+ end
+end
diff --git a/spec/support/services/deploy_token_shared_examples.rb b/spec/support/services/deploy_token_shared_examples.rb
index 9d681970739..adc5ea0fcdc 100644
--- a/spec/support/services/deploy_token_shared_examples.rb
+++ b/spec/support/services/deploy_token_shared_examples.rb
@@ -46,7 +46,7 @@ RSpec.shared_examples 'a deploy token creation service' do
end
context 'when the deploy token is invalid' do
- let(:deploy_token_params) { attributes_for(:deploy_token, read_repository: false, read_registry: false) }
+ let(:deploy_token_params) { attributes_for(:deploy_token, read_repository: false, read_registry: false, write_registry: false) }
it 'does not create a new DeployToken' do
expect { subject }.not_to change { DeployToken.count }
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index 9ffe13545f7..e4bc44c9d32 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -1,13 +1,6 @@
# frozen_string_literal: true
RSpec.shared_context 'project navbar structure' do
- let(:requirements_nav_item) do
- {
- nav_item: _('Requirements'),
- nav_sub_items: [_('List')]
- }
- end
-
let(:analytics_nav_item) do
{
nav_item: _('Analytics'),
@@ -56,7 +49,6 @@ RSpec.shared_context 'project navbar structure' do
nav_item: _('Merge Requests'),
nav_sub_items: []
},
- (requirements_nav_item if Gitlab.ee?),
{
nav_item: _('CI / CD'),
nav_sub_items: [
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index 4f81a71f586..c2797c49c02 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -14,16 +14,17 @@ RSpec.shared_context 'GroupPolicy context' do
%i[
read_label read_group upload_file read_namespace read_group_activity
read_group_issues read_group_boards read_group_labels read_group_milestones
- read_group_merge_requests
+ read_group_merge_requests read_wiki
]
end
let(:read_group_permissions) { %i[read_label read_list read_milestone read_board] }
- let(:reporter_permissions) { %i[admin_label read_container_image read_metrics_dashboard_annotation] }
- let(:developer_permissions) { %i[admin_milestone create_metrics_dashboard_annotation delete_metrics_dashboard_annotation update_metrics_dashboard_annotation] }
+ let(:reporter_permissions) { %i[admin_label read_container_image read_metrics_dashboard_annotation download_wiki_code] }
+ let(:developer_permissions) { %i[admin_milestone create_metrics_dashboard_annotation delete_metrics_dashboard_annotation update_metrics_dashboard_annotation create_wiki] }
let(:maintainer_permissions) do
%i[
create_projects
read_cluster create_cluster update_cluster admin_cluster add_cluster
+ admin_wiki
]
end
let(:owner_permissions) do
diff --git a/spec/support/shared_examples/policies/wiki_policies_shared_examples.rb b/spec/support/shared_examples/policies/wiki_policies_shared_examples.rb
new file mode 100644
index 00000000000..b91500ffd9c
--- /dev/null
+++ b/spec/support/shared_examples/policies/wiki_policies_shared_examples.rb
@@ -0,0 +1,152 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'model with wiki policies' do
+ let(:container) { raise NotImplementedError }
+ let(:permissions) { %i(read_wiki create_wiki update_wiki admin_wiki download_wiki_code) }
+
+ # TODO: Remove this helper once we implement group features
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/208412
+ def set_access_level(access_level)
+ raise NotImplementedError
+ end
+
+ subject { described_class.new(owner, container) }
+
+ context 'when the feature is disabled' do
+ before do
+ set_access_level(ProjectFeature::DISABLED)
+ end
+
+ it 'does not include the wiki permissions' do
+ expect_disallowed(*permissions)
+ end
+
+ context 'when there is an external wiki' do
+ it 'does not include the wiki permissions' do
+ allow(container).to receive(:has_external_wiki?).and_return(true)
+
+ expect_disallowed(*permissions)
+ end
+ end
+ end
+
+ describe 'read_wiki' do
+ subject { described_class.new(user, container) }
+
+ member_roles = %i[guest developer]
+ stranger_roles = %i[anonymous non_member]
+
+ user_roles = stranger_roles + member_roles
+
+ # When a user is anonymous, their `current_user == nil`
+ let(:user) { create(:user) unless user_role == :anonymous }
+
+ before do
+ container.visibility = container_visibility
+ set_access_level(wiki_access_level)
+ container.add_user(user, user_role) if member_roles.include?(user_role)
+ end
+
+ title = ->(container_visibility, wiki_access_level, user_role) do
+ [
+ "container is #{Gitlab::VisibilityLevel.level_name container_visibility}",
+ "wiki is #{ProjectFeature.str_from_access_level wiki_access_level}",
+ "user is #{user_role}"
+ ].join(', ')
+ end
+
+ describe 'Situations where :read_wiki is always false' do
+ where(case_names: title,
+ container_visibility: Gitlab::VisibilityLevel.options.values,
+ wiki_access_level: [ProjectFeature::DISABLED],
+ user_role: user_roles)
+
+ with_them do
+ it { is_expected.to be_disallowed(:read_wiki) }
+ end
+ end
+
+ describe 'Situations where :read_wiki is always true' do
+ where(case_names: title,
+ container_visibility: [Gitlab::VisibilityLevel::PUBLIC],
+ wiki_access_level: [ProjectFeature::ENABLED],
+ user_role: user_roles)
+
+ with_them do
+ it { is_expected.to be_allowed(:read_wiki) }
+ end
+ end
+
+ describe 'Situations where :read_wiki requires membership' do
+ context 'the wiki is private, and the user is a member' do
+ where(case_names: title,
+ container_visibility: [Gitlab::VisibilityLevel::PUBLIC,
+ Gitlab::VisibilityLevel::INTERNAL],
+ wiki_access_level: [ProjectFeature::PRIVATE],
+ user_role: member_roles)
+
+ with_them do
+ it { is_expected.to be_allowed(:read_wiki) }
+ end
+ end
+
+ context 'the wiki is private, and the user is not member' do
+ where(case_names: title,
+ container_visibility: [Gitlab::VisibilityLevel::PUBLIC,
+ Gitlab::VisibilityLevel::INTERNAL],
+ wiki_access_level: [ProjectFeature::PRIVATE],
+ user_role: stranger_roles)
+
+ with_them do
+ it { is_expected.to be_disallowed(:read_wiki) }
+ end
+ end
+
+ context 'the wiki is enabled, and the user is a member' do
+ where(case_names: title,
+ container_visibility: [Gitlab::VisibilityLevel::PRIVATE],
+ wiki_access_level: [ProjectFeature::ENABLED],
+ user_role: member_roles)
+
+ with_them do
+ it { is_expected.to be_allowed(:read_wiki) }
+ end
+ end
+
+ context 'the wiki is enabled, and the user is not a member' do
+ where(case_names: title,
+ container_visibility: [Gitlab::VisibilityLevel::PRIVATE],
+ wiki_access_level: [ProjectFeature::ENABLED],
+ user_role: stranger_roles)
+
+ with_them do
+ it { is_expected.to be_disallowed(:read_wiki) }
+ end
+ end
+ end
+
+ describe 'Situations where :read_wiki prohibits anonymous access' do
+ context 'the user is not anonymous' do
+ where(case_names: title,
+ container_visibility: [Gitlab::VisibilityLevel::INTERNAL],
+ wiki_access_level: [ProjectFeature::ENABLED, ProjectFeature::PUBLIC],
+ user_role: user_roles.reject { |u| u == :anonymous })
+
+ with_them do
+ it { is_expected.to be_allowed(:read_wiki) }
+ end
+ end
+
+ context 'the user is anonymous' do
+ where(case_names: title,
+ container_visibility: [Gitlab::VisibilityLevel::INTERNAL],
+ wiki_access_level: [ProjectFeature::ENABLED, ProjectFeature::PUBLIC],
+ user_role: %i[anonymous])
+
+ with_them do
+ it { is_expected.to be_disallowed(:read_wiki) }
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
index b22379b8b68..d6166ac8188 100644
--- a/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/projects/update_repository_storage_service_shared_examples.rb
@@ -22,6 +22,9 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
context 'when the move succeeds', :clean_gitlab_redis_shared_state do
before do
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
+
allow(project_repository_double).to receive(:create_repository)
.and_return(true)
allow(project_repository_double).to receive(:replicate)
@@ -83,17 +86,19 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
end
end
- context 'when the project is already on the target storage' do
+ context 'when the filesystems are the same' do
it 'bails out and does nothing' do
result = subject.execute(project.repository_storage)
expect(result[:status]).to eq(:error)
- expect(result[:message]).to match(/repository and source have the same storage/)
+ expect(result[:message]).to match(/SameFilesystemError/)
end
end
context "when the move of the #{repository_type} repository fails" do
it 'unmarks the repository as read-only without updating the repository storage' do
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
allow(project_repository_double).to receive(:create_repository)
.and_return(true)
allow(project_repository_double).to receive(:replicate)
@@ -119,6 +124,8 @@ RSpec.shared_examples 'moves repository to another storage' do |repository_type|
context "when the checksum of the #{repository_type} repository does not match" do
it 'unmarks the repository as read-only without updating the repository storage' do
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
+ allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('test_second_storage').and_return(SecureRandom.uuid)
allow(project_repository_double).to receive(:create_repository)
.and_return(true)
allow(project_repository_double).to receive(:replicate)
diff --git a/spec/uploaders/records_uploads_spec.rb b/spec/uploaders/records_uploads_spec.rb
index 71eff23c77c..140595e58ad 100644
--- a/spec/uploaders/records_uploads_spec.rb
+++ b/spec/uploaders/records_uploads_spec.rb
@@ -78,7 +78,8 @@ describe RecordsUploads do
path: File.join('uploads', 'rails_sample.jpg'),
size: 512.kilobytes,
model: build_stubbed(:user),
- uploader: uploader.class.to_s
+ uploader: uploader.class.to_s,
+ store: ::ObjectStorage::Store::LOCAL
)
uploader.upload = existing
@@ -98,7 +99,8 @@ describe RecordsUploads do
path: File.join('uploads', 'rails_sample.jpg'),
size: 512.kilobytes,
model: project,
- uploader: uploader.class.to_s
+ uploader: uploader.class.to_s,
+ store: ::ObjectStorage::Store::LOCAL
)
uploader.store!(upload_fixture('rails_sample.jpg'))
diff --git a/spec/views/projects/pages/show.html.haml_spec.rb b/spec/views/projects/pages/show.html.haml_spec.rb
index 80410e7bc32..63b66616f31 100644
--- a/spec/views/projects/pages/show.html.haml_spec.rb
+++ b/spec/views/projects/pages/show.html.haml_spec.rb
@@ -17,7 +17,7 @@ describe 'projects/pages/show' do
assign(:project, project)
allow(view).to receive(:current_user).and_return(user)
- assign(:domains, [domain])
+ assign(:domains, [domain.present(current_user: user)])
end
describe 'validation warning' do
@@ -47,7 +47,7 @@ describe 'projects/pages/show' do
describe "warning about failed Let's Encrypt" do
let(:error_message) do
- "Something went wrong while obtaining Let's Encrypt certificate for #{domain.domain}. "\
+ "Something went wrong while obtaining the Let's Encrypt certificate for #{domain.domain}. "\
"To retry visit your domain details."
end
diff --git a/spec/views/projects/pages_domains/show.html.haml_spec.rb b/spec/views/projects/pages_domains/show.html.haml_spec.rb
index 51c7a08fe96..7d502e74d10 100644
--- a/spec/views/projects/pages_domains/show.html.haml_spec.rb
+++ b/spec/views/projects/pages_domains/show.html.haml_spec.rb
@@ -7,7 +7,7 @@ describe 'projects/pages_domains/show' do
before do
assign(:project, project)
- assign(:domain, domain)
+ assign(:domain, domain.present)
stub_pages_setting(external_https: true)
end
diff --git a/spec/workers/concerns/project_import_options_spec.rb b/spec/workers/concerns/project_import_options_spec.rb
index c5fbcfb5fb0..3ccfb21b653 100644
--- a/spec/workers/concerns/project_import_options_spec.rb
+++ b/spec/workers/concerns/project_import_options_spec.rb
@@ -39,6 +39,17 @@ describe ProjectImportOptions do
expect(project.import_state.reload.last_error).to include("import")
end
+ context 'when the project is a jira import' do
+ let(:project) { create(:project, import_type: 'jira') }
+ let!(:jira_import) { create(:jira_import_state, project: project) }
+
+ it 'marks the latest jira import as failed' do
+ worker_class.sidekiq_retries_exhausted_block.call(job)
+
+ expect(project.latest_jira_import.reload.status).to eq('failed')
+ end
+ end
+
context 'when project does not have import_state' do
let(:project) { create(:project) }
diff --git a/spec/workers/project_update_repository_storage_worker_spec.rb b/spec/workers/project_update_repository_storage_worker_spec.rb
index ed99b8135c2..57a4c2128b3 100644
--- a/spec/workers/project_update_repository_storage_worker_spec.rb
+++ b/spec/workers/project_update_repository_storage_worker_spec.rb
@@ -9,33 +9,12 @@ describe ProjectUpdateRepositoryStorageWorker do
subject { described_class.new }
describe "#perform" do
- context 'when source and target repositories are on different filesystems' do
- before do
- allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('default').and_call_original
- allow(Gitlab::GitalyClient).to receive(:filesystem_id).with('new_storage').and_return(SecureRandom.uuid)
+ it "calls the update repository storage service" do
+ expect_next_instance_of(Projects::UpdateRepositoryStorageService) do |instance|
+ expect(instance).to receive(:execute).with('new_storage')
end
- it "calls the update repository storage service" do
- expect_next_instance_of(Projects::UpdateRepositoryStorageService) do |instance|
- expect(instance).to receive(:execute).with('new_storage')
- end
-
- subject.perform(project.id, 'new_storage')
- end
- end
-
- context 'when source and target repositories are on the same filesystems' do
- let(:filesystem_id) { SecureRandom.uuid }
-
- before do
- allow(Gitlab::GitalyClient).to receive(:filesystem_id).and_return(filesystem_id)
- end
-
- it 'raises an error' do
- expect_any_instance_of(::Projects::UpdateRepositoryStorageService).not_to receive(:new)
-
- expect { subject.perform(project.id, 'new_storage') }.to raise_error(ProjectUpdateRepositoryStorageWorker::SameFilesystemError)
- end
+ subject.perform(project.id, 'new_storage')
end
end
end
diff --git a/yarn.lock b/yarn.lock
index 42a5c0d29c3..81bbf8a59e3 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1036,10 +1036,10 @@
"@sentry/types" "5.10.0"
tslib "^1.9.3"
-"@sourcegraph/code-host-integration@0.0.34":
- version "0.0.34"
- resolved "https://registry.yarnpkg.com/@sourcegraph/code-host-integration/-/code-host-integration-0.0.34.tgz#c8f94854d64fe035926bbda7bed3a538a7259d03"
- integrity sha512-TAa5kU/zPb9PfB4HIhaEDhKKdW5Fx9YVx9WWMOwz9elD0y9FZoAXDO1o4Pz1cm1IP/VZwd8csypAWgfxsAmfzw==
+"@sourcegraph/code-host-integration@0.0.36":
+ version "0.0.36"
+ resolved "https://registry.yarnpkg.com/@sourcegraph/code-host-integration/-/code-host-integration-0.0.36.tgz#2f4d287840ac2944c78ef92f10f0db0ef8a077fa"
+ integrity sha512-Hpj1xiVhPxMsjLNre9MrYYAM1SPOWPE9yG9SPtz4dqYzc6/ycaPGyr+ljcaWEclS9hZCvkk4+qVC5WONpYVjyA==
"@types/anymatch@*":
version "1.3.0"